From bcd9f462c2acc65b6869089943fb164329e858b2 Mon Sep 17 00:00:00 2001 From: hanchao Date: Mon, 10 Oct 2022 22:12:59 +0800 Subject: [PATCH] golang: fix CVE-2022-41715,CVE-2022-2880,CVE-2022-2879 Score: Score:CVE-2022-41715:4,CVE-2022-2880:5.3,CVE-2022-2879:6.2 Reference:https://go-review.googlesource.com/c/go/+/438501, https://go-review.googlesource.com/c/go/+/433695, https://go-review.googlesource.com/c/go/+/438500 Conflict:NA Reason: fix CVE-2022-41715,CVE-2022-2880,CVE-2022-2879 (cherry picked from commit 35fc18fe0e32f4e0889de907f6f8eb1adfe492c2) --- ...o1.18-regexp-limit-size-of-parsed-re.patch | 324 ++++++++++++++++ ...o1.18-net-http-httputil-avoid-query-.patch | 174 +++++++++ ...o1.18-archive-tar-limit-size-of-head.patch | 279 ++++++++++++++ ...-reject-query-values-with-semicolons.patch | 348 ++++++++++++++++++ golang.spec | 12 +- 5 files changed, 1136 insertions(+), 1 deletion(-) create mode 100644 0079-release-branch.go1.18-regexp-limit-size-of-parsed-re.patch create mode 100644 0080-release-branch.go1.18-net-http-httputil-avoid-query-.patch create mode 100644 0081-release-branch.go1.18-archive-tar-limit-size-of-head.patch create mode 100644 0082-net-url-reject-query-values-with-semicolons.patch diff --git a/0079-release-branch.go1.18-regexp-limit-size-of-parsed-re.patch b/0079-release-branch.go1.18-regexp-limit-size-of-parsed-re.patch new file mode 100644 index 0000000..8696633 --- /dev/null +++ b/0079-release-branch.go1.18-regexp-limit-size-of-parsed-re.patch @@ -0,0 +1,324 @@ +From 6a6f6943355ecca5fc6ca4538484edd492274b9a Mon Sep 17 00:00:00 2001 +From: Russ Cox +Date: Wed, 28 Sep 2022 11:18:51 -0400 +Subject: [PATCH] regexp: limit size of parsed regexps + +Set a 128 MB limit on the amount of space used by []syntax.Inst +in the compiled form corresponding to a given regexp. + +Also set a 128 MB limit on the rune storage in the *syntax.Regexp +tree itself. + +Thanks to Adam Korczynski (ADA Logics) and OSS-Fuzz for reporting this issue. + +Fixes CVE-2022-41715. +Updates #55949. +Fixes #55950. + +Change-Id: Ia656baed81564436368cf950e1c5409752f28e1b +Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1592136 +TryBot-Result: Security TryBots +Reviewed-by: Damien Neil +Run-TryBot: Roland Shoemaker +Reviewed-by: Julie Qiu +Reviewed-on: https://go-review.googlesource.com/c/go/+/438501 +Run-TryBot: Carlos Amedee +Reviewed-by: Carlos Amedee +Reviewed-by: Dmitri Shuralyov +TryBot-Result: Gopher Robot +Reviewed-by: Dmitri Shuralyov +--- + src/regexp/syntax/parse.go | 164 +++++++++++++++++++++++++++++--- + src/regexp/syntax/parse_test.go | 13 ++- + 2 files changed, 161 insertions(+), 16 deletions(-) + +diff --git a/src/regexp/syntax/parse.go b/src/regexp/syntax/parse.go +index d7cf2af..67254d6 100644 +--- a/src/regexp/syntax/parse.go ++++ b/src/regexp/syntax/parse.go +@@ -43,6 +43,7 @@ const ( + ErrMissingRepeatArgument ErrorCode = "missing argument to repetition operator" + ErrTrailingBackslash ErrorCode = "trailing backslash at end of expression" + ErrUnexpectedParen ErrorCode = "unexpected )" ++ ErrNestingDepth ErrorCode = "expression nests too deeply" + ) + + func (e ErrorCode) String() string { +@@ -90,15 +91,49 @@ const ( + // until we've allocated at least maxHeight Regexp structures. + const maxHeight = 1000 + ++// maxSize is the maximum size of a compiled regexp in Insts. 
++// It too is somewhat arbitrarily chosen, but the idea is to be large enough ++// to allow significant regexps while at the same time small enough that ++// the compiled form will not take up too much memory. ++// 128 MB is enough for a 3.3 million Inst structures, which roughly ++// corresponds to a 3.3 MB regexp. ++const ( ++ maxSize = 128 << 20 / instSize ++ instSize = 5 * 8 // byte, 2 uint32, slice is 5 64-bit words ++) ++ ++// maxRunes is the maximum number of runes allowed in a regexp tree ++// counting the runes in all the nodes. ++// Ignoring character classes p.numRunes is always less than the length of the regexp. ++// Character classes can make it much larger: each \pL adds 1292 runes. ++// 128 MB is enough for 32M runes, which is over 26k \pL instances. ++// Note that repetitions do not make copies of the rune slices, ++// so \pL{1000} is only one rune slice, not 1000. ++// We could keep a cache of character classes we've seen, ++// so that all the \pL we see use the same rune list, ++// but that doesn't remove the problem entirely: ++// consider something like [\pL01234][\pL01235][\pL01236]...[\pL^&*()]. ++// And because the Rune slice is exposed directly in the Regexp, ++// there is not an opportunity to change the representation to allow ++// partial sharing between different character classes. ++// So the limit is the best we can do. ++const ( ++ maxRunes = 128 << 20 / runeSize ++ runeSize = 4 // rune is int32 ++) ++ + type parser struct { + flags Flags // parse mode flags + stack []*Regexp // stack of parsed expressions + free *Regexp + numCap int // number of capturing groups seen + wholeRegexp string +- tmpClass []rune // temporary char class work space +- numRegexp int // number of regexps allocated +- height map[*Regexp]int // regexp height for height limit check ++ tmpClass []rune // temporary char class work space ++ numRegexp int // number of regexps allocated ++ numRunes int // number of runes in char classes ++ repeats int64 // product of all repetitions seen ++ height map[*Regexp]int // regexp height, for height limit check ++ size map[*Regexp]int64 // regexp compiled size, for size limit check + } + + func (p *parser) newRegexp(op Op) *Regexp { +@@ -122,6 +157,104 @@ func (p *parser) reuse(re *Regexp) { + p.free = re + } + ++func (p *parser) checkLimits(re *Regexp) { ++ if p.numRunes > maxRunes { ++ panic(ErrInternalError) ++ } ++ p.checkSize(re) ++ p.checkHeight(re) ++} ++ ++func (p *parser) checkSize(re *Regexp) { ++ if p.size == nil { ++ // We haven't started tracking size yet. ++ // Do a relatively cheap check to see if we need to start. ++ // Maintain the product of all the repeats we've seen ++ // and don't track if the total number of regexp nodes ++ // we've seen times the repeat product is in budget. ++ if p.repeats == 0 { ++ p.repeats = 1 ++ } ++ if re.Op == OpRepeat { ++ n := re.Max ++ if n == -1 { ++ n = re.Min ++ } ++ if n <= 0 { ++ n = 1 ++ } ++ if int64(n) > maxSize/p.repeats { ++ p.repeats = maxSize ++ } else { ++ p.repeats *= int64(n) ++ } ++ } ++ if int64(p.numRegexp) < maxSize/p.repeats { ++ return ++ } ++ ++ // We need to start tracking size. ++ // Make the map and belatedly populate it ++ // with info about everything we've constructed so far. 
++ p.size = make(map[*Regexp]int64) ++ for _, re := range p.stack { ++ p.checkSize(re) ++ } ++ } ++ ++ if p.calcSize(re, true) > maxSize { ++ panic(ErrInternalError) ++ } ++} ++ ++func (p *parser) calcSize(re *Regexp, force bool) int64 { ++ if !force { ++ if size, ok := p.size[re]; ok { ++ return size ++ } ++ } ++ ++ var size int64 ++ switch re.Op { ++ case OpLiteral: ++ size = int64(len(re.Rune)) ++ case OpCapture, OpStar: ++ // star can be 1+ or 2+; assume 2 pessimistically ++ size = 2 + p.calcSize(re.Sub[0], false) ++ case OpPlus, OpQuest: ++ size = 1 + p.calcSize(re.Sub[0], false) ++ case OpConcat: ++ for _, sub := range re.Sub { ++ size += p.calcSize(sub, false) ++ } ++ case OpAlternate: ++ for _, sub := range re.Sub { ++ size += p.calcSize(sub, false) ++ } ++ if len(re.Sub) > 1 { ++ size += int64(len(re.Sub)) - 1 ++ } ++ case OpRepeat: ++ sub := p.calcSize(re.Sub[0], false) ++ if re.Max == -1 { ++ if re.Min == 0 { ++ size = 2 + sub // x* ++ } else { ++ size = 1 + int64(re.Min)*sub // xxx+ ++ } ++ break ++ } ++ // x{2,5} = xx(x(x(x)?)?)? ++ size = int64(re.Max)*sub + int64(re.Max-re.Min) ++ } ++ ++ if size < 1 { ++ size = 1 ++ } ++ p.size[re] = size ++ return size ++} ++ + func (p *parser) checkHeight(re *Regexp) { + if p.numRegexp < maxHeight { + return +@@ -133,7 +266,7 @@ func (p *parser) checkHeight(re *Regexp) { + } + } + if p.calcHeight(re, true) > maxHeight { +- panic(ErrInternalError) ++ panic(ErrNestingDepth) + } + } + +@@ -158,6 +291,7 @@ func (p *parser) calcHeight(re *Regexp, force bool) int { + + // push pushes the regexp re onto the parse stack and returns the regexp. + func (p *parser) push(re *Regexp) *Regexp { ++ p.numRunes += len(re.Rune) + if re.Op == OpCharClass && len(re.Rune) == 2 && re.Rune[0] == re.Rune[1] { + // Single rune. + if p.maybeConcat(re.Rune[0], p.flags&^FoldCase) { +@@ -189,7 +323,7 @@ func (p *parser) push(re *Regexp) *Regexp { + } + + p.stack = append(p.stack, re) +- p.checkHeight(re) ++ p.checkLimits(re) + return re + } + +@@ -299,7 +433,7 @@ func (p *parser) repeat(op Op, min, max int, before, after, lastRepeat string) ( + re.Sub = re.Sub0[:1] + re.Sub[0] = sub + p.stack[n-1] = re +- p.checkHeight(re) ++ p.checkLimits(re) + + if op == OpRepeat && (min >= 2 || max >= 2) && !repeatIsValid(re, 1000) { + return "", &Error{ErrInvalidRepeatSize, before[:len(before)-len(after)]} +@@ -444,12 +578,16 @@ func (p *parser) collapse(subs []*Regexp, op Op) *Regexp { + // frees (passes to p.reuse) any removed *Regexps. 
+ // + // For example, +-// ABC|ABD|AEF|BCX|BCY ++// ++// ABC|ABD|AEF|BCX|BCY ++// + // simplifies by literal prefix extraction to +-// A(B(C|D)|EF)|BC(X|Y) ++// ++// A(B(C|D)|EF)|BC(X|Y) ++// + // which simplifies by character class introduction to +-// A(B[CD]|EF)|BC[XY] + // ++// A(B[CD]|EF)|BC[XY] + func (p *parser) factor(sub []*Regexp) []*Regexp { + if len(sub) < 2 { + return sub +@@ -503,6 +641,7 @@ func (p *parser) factor(sub []*Regexp) []*Regexp { + + for j := start; j < i; j++ { + sub[j] = p.removeLeadingString(sub[j], len(str)) ++ p.checkLimits(sub[j]) + } + suffix := p.collapse(sub[start:i], OpAlternate) // recurse + +@@ -560,6 +699,7 @@ func (p *parser) factor(sub []*Regexp) []*Regexp { + for j := start; j < i; j++ { + reuse := j != start // prefix came from sub[start] + sub[j] = p.removeLeadingRegexp(sub[j], reuse) ++ p.checkLimits(sub[j]) + } + suffix := p.collapse(sub[start:i], OpAlternate) // recurse + +@@ -757,8 +897,10 @@ func parse(s string, flags Flags) (_ *Regexp, err error) { + panic(r) + case nil: + // ok +- case ErrInternalError: ++ case ErrInternalError: // too big + err = &Error{Code: ErrInternalError, Expr: s} ++ case ErrNestingDepth: ++ err = &Error{Code: ErrNestingDepth, Expr: s} + } + }() + +@@ -1801,7 +1943,7 @@ func appendClass(r []rune, x []rune) []rune { + return r + } + +-// appendFolded returns the result of appending the case folding of the class x to the class r. ++// appendFoldedClass returns the result of appending the case folding of the class x to the class r. + func appendFoldedClass(r []rune, x []rune) []rune { + for i := 0; i < len(x); i += 2 { + r = appendFoldedRange(r, x[i], x[i+1]) +diff --git a/src/regexp/syntax/parse_test.go b/src/regexp/syntax/parse_test.go +index 1ef6d8a..67e3c56 100644 +--- a/src/regexp/syntax/parse_test.go ++++ b/src/regexp/syntax/parse_test.go +@@ -484,12 +484,15 @@ var invalidRegexps = []string{ + `(?P<>a)`, + `[a-Z]`, + `(?i)[a-Z]`, +- `a{100000}`, +- `a{100000,}`, +- "((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})", +- strings.Repeat("(", 1000) + strings.Repeat(")", 1000), +- strings.Repeat("(?:", 1000) + strings.Repeat(")*", 1000), + `\Q\E*`, ++ `a{100000}`, // too much repetition ++ `a{100000,}`, // too much repetition ++ "((((((((((x{2}){2}){2}){2}){2}){2}){2}){2}){2}){2})", // too much repetition ++ strings.Repeat("(", 1000) + strings.Repeat(")", 1000), // too deep ++ strings.Repeat("(?:", 1000) + strings.Repeat(")*", 1000), // too deep ++ "(" + strings.Repeat("(xx?)", 1000) + "){1000}", // too long ++ strings.Repeat("(xx?){1000}", 1000), // too long ++ strings.Repeat(`\pL`, 270000), // too many runes + } + + var onlyPerl = []string{ +-- +2.33.0 + diff --git a/0080-release-branch.go1.18-net-http-httputil-avoid-query-.patch b/0080-release-branch.go1.18-net-http-httputil-avoid-query-.patch new file mode 100644 index 0000000..df193bc --- /dev/null +++ b/0080-release-branch.go1.18-net-http-httputil-avoid-query-.patch @@ -0,0 +1,174 @@ +From 61cdfc54fe4d7dd2179d3d6ae1d3ebb5709daad7 Mon Sep 17 00:00:00 2001 +From: Damien Neil +Date: Thu, 22 Sep 2022 13:32:00 -0700 +Subject: [PATCH] [release-branch.go1.18] net/http/httputil: avoid query + parameter smuggling + +Query parameter smuggling occurs when a proxy's interpretation +of query parameters differs from that of a downstream server. +Change ReverseProxy to avoid forwarding ignored query parameters. 
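+
+As a rough sketch of the effect (illustration only, not part of this
+commit): a Director hook that parses the form opts the proxy into
+query cleaning, so with this change and the net/url semicolon change
+both applied, a request for "?a=1&a=2;b=3" reaches the backend as
+"a=1".
+
+    package main
+
+    import (
+        "fmt"
+        "io"
+        "log"
+        "net/http"
+        "net/http/httptest"
+        "net/http/httputil"
+        "net/url"
+    )
+
+    func main() {
+        // Backend echoes the raw query it actually receives.
+        backend := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+            fmt.Fprint(w, r.URL.RawQuery)
+        }))
+        defer backend.Close()
+
+        target, _ := url.Parse(backend.URL)
+        proxy := httputil.NewSingleHostReverseProxy(target)
+        oldDirector := proxy.Director
+        proxy.Director = func(r *http.Request) {
+            r.FormValue("a") // parsing the form opts in to query cleaning
+            oldDirector(r)
+        }
+
+        frontend := httptest.NewServer(proxy)
+        defer frontend.Close()
+        frontend.Config.ErrorLog = log.New(io.Discard, "", 0) // silence semicolon warnings
+
+        resp, err := http.Get(frontend.URL + "/?a=1&a=2;b=3")
+        if err != nil {
+            log.Fatal(err)
+        }
+        defer resp.Body.Close()
+        body, _ := io.ReadAll(resp.Body)
+        fmt.Println(string(body)) // "a=1" once both patches are applied
+    }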
+ +Remove unparsable query parameters from the outbound request + + * if req.Form != nil after calling ReverseProxy.Director; and + * before calling ReverseProxy.Rewrite. + +This change preserves the existing behavior of forwarding the +raw query untouched if a Director hook does not parse the query +by calling Request.ParseForm (possibly indirectly). + +Fixes #55842 +For #54663 +For CVE-2022-2880 + +Change-Id: If1621f6b0e73a49d79059dae9e6b256e0ff18ca9 +Reviewed-on: https://go-review.googlesource.com/c/go/+/432976 +Reviewed-by: Roland Shoemaker +Reviewed-by: Brad Fitzpatrick +TryBot-Result: Gopher Robot +Run-TryBot: Damien Neil +(cherry picked from commit 7c84234142149bd24a4096c6cab691d3593f3431) +Reviewed-on: https://go-review.googlesource.com/c/go/+/433695 +Reviewed-by: Dmitri Shuralyov +Reviewed-by: Dmitri Shuralyov +--- + src/net/http/httputil/reverseproxy.go | 36 +++++++++++ + src/net/http/httputil/reverseproxy_test.go | 74 ++++++++++++++++++++++ + 2 files changed, 110 insertions(+) + +diff --git a/src/net/http/httputil/reverseproxy.go b/src/net/http/httputil/reverseproxy.go +index 68754cb088..5b42b76f37 100644 +--- a/src/net/http/httputil/reverseproxy.go ++++ b/src/net/http/httputil/reverseproxy.go +@@ -248,6 +248,9 @@ func (p *ReverseProxy) ServeHTTP(rw http.ResponseWriter, req *http.Request) { + } + + p.Director(outreq) ++ if outreq.Form != nil { ++ outreq.URL.RawQuery = cleanQueryParams(outreq.URL.RawQuery) ++ } + outreq.Close = false + + reqUpType := upgradeType(outreq.Header) +@@ -614,3 +617,36 @@ func (c switchProtocolCopier) copyToBackend(errc chan<- error) { + _, err := io.Copy(c.backend, c.user) + errc <- err + } ++ ++func cleanQueryParams(s string) string { ++ reencode := func(s string) string { ++ v, _ := url.ParseQuery(s) ++ return v.Encode() ++ } ++ for i := 0; i < len(s); { ++ switch s[i] { ++ case ';': ++ return reencode(s) ++ case '%': ++ if i+2 >= len(s) || !ishex(s[i+1]) || !ishex(s[i+2]) { ++ return reencode(s) ++ } ++ i += 3 ++ default: ++ i++ ++ } ++ } ++ return s ++} ++ ++func ishex(c byte) bool { ++ switch { ++ case '0' <= c && c <= '9': ++ return true ++ case 'a' <= c && c <= 'f': ++ return true ++ case 'A' <= c && c <= 'F': ++ return true ++ } ++ return false ++} +diff --git a/src/net/http/httputil/reverseproxy_test.go b/src/net/http/httputil/reverseproxy_test.go +index 1f2dfb9867..3dfd24df35 100644 +--- a/src/net/http/httputil/reverseproxy_test.go ++++ b/src/net/http/httputil/reverseproxy_test.go +@@ -1460,3 +1460,77 @@ func TestJoinURLPath(t *testing.T) { + } + } + } ++ ++const ( ++ testWantsCleanQuery = true ++ testWantsRawQuery = false ++) ++ ++func TestReverseProxyQueryParameterSmugglingDirectorDoesNotParseForm(t *testing.T) { ++ testReverseProxyQueryParameterSmuggling(t, testWantsRawQuery, func(u *url.URL) *ReverseProxy { ++ proxyHandler := NewSingleHostReverseProxy(u) ++ oldDirector := proxyHandler.Director ++ proxyHandler.Director = func(r *http.Request) { ++ oldDirector(r) ++ } ++ return proxyHandler ++ }) ++} ++ ++func TestReverseProxyQueryParameterSmugglingDirectorParsesForm(t *testing.T) { ++ testReverseProxyQueryParameterSmuggling(t, testWantsCleanQuery, func(u *url.URL) *ReverseProxy { ++ proxyHandler := NewSingleHostReverseProxy(u) ++ oldDirector := proxyHandler.Director ++ proxyHandler.Director = func(r *http.Request) { ++ // Parsing the form causes ReverseProxy to remove unparsable ++ // query parameters before forwarding. 
++ r.FormValue("a") ++ oldDirector(r) ++ } ++ return proxyHandler ++ }) ++} ++ ++func testReverseProxyQueryParameterSmuggling(t *testing.T, wantCleanQuery bool, newProxy func(*url.URL) *ReverseProxy) { ++ const content = "response_content" ++ backend := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { ++ w.Write([]byte(r.URL.RawQuery)) ++ })) ++ defer backend.Close() ++ backendURL, err := url.Parse(backend.URL) ++ if err != nil { ++ t.Fatal(err) ++ } ++ proxyHandler := newProxy(backendURL) ++ frontend := httptest.NewServer(proxyHandler) ++ defer frontend.Close() ++ ++ // Don't spam output with logs of queries containing semicolons. ++ backend.Config.ErrorLog = log.New(io.Discard, "", 0) ++ frontend.Config.ErrorLog = log.New(io.Discard, "", 0) ++ ++ for _, test := range []struct { ++ rawQuery string ++ cleanQuery string ++ }{{ ++ rawQuery: "a=1&a=2;b=3", ++ cleanQuery: "a=1", ++ }, { ++ rawQuery: "a=1&a=%zz&b=3", ++ cleanQuery: "a=1&b=3", ++ }} { ++ res, err := frontend.Client().Get(frontend.URL + "?" + test.rawQuery) ++ if err != nil { ++ t.Fatalf("Get: %v", err) ++ } ++ defer res.Body.Close() ++ body, _ := io.ReadAll(res.Body) ++ wantQuery := test.rawQuery ++ if wantCleanQuery { ++ wantQuery = test.cleanQuery ++ } ++ if got, want := string(body), wantQuery; got != want { ++ t.Errorf("proxy forwarded raw query %q as %q, want %q", test.rawQuery, got, want) ++ } ++ } ++} +-- +2.33.0 + diff --git a/0081-release-branch.go1.18-archive-tar-limit-size-of-head.patch b/0081-release-branch.go1.18-archive-tar-limit-size-of-head.patch new file mode 100644 index 0000000..93d5202 --- /dev/null +++ b/0081-release-branch.go1.18-archive-tar-limit-size-of-head.patch @@ -0,0 +1,279 @@ +From 61088cf9ed18b7b03dad384f2691a17f85fc24c5 Mon Sep 17 00:00:00 2001 +From: Damien Neil +Date: Fri, 2 Sep 2022 20:45:18 -0700 +Subject: [PATCH] archive/tar: limit size of headers + +Set a 1MiB limit on special file blocks (PAX headers, GNU long names, +GNU link names), to avoid reading arbitrarily large amounts of data +into memory. + +Thanks to Adam Korczynski (ADA Logics) and OSS-Fuzz for reporting +this issue. 
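+
+As a rough sketch of the effect (illustration only, not part of this
+commit): a header whose PAX record exceeds the new limit is rejected
+with ErrFieldTooLong instead of being written.
+
+    package main
+
+    import (
+        "archive/tar"
+        "errors"
+        "fmt"
+        "io"
+        "strings"
+    )
+
+    func main() {
+        w := tar.NewWriter(io.Discard)
+        hdr := &tar.Header{
+            // A single PAX record slightly over 1 MiB once encoded.
+            PAXRecords: map[string]string{"GOLANG.x": strings.Repeat("a", 1<<20)},
+        }
+        err := w.WriteHeader(hdr)
+        fmt.Println(errors.Is(err, tar.ErrFieldTooLong)) // true with this limit in place
+    }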
+ +Fixes CVE-2022-2879 +Updates #54853 +Fixes #55925 + +Change-Id: I85136d6ff1e0af101a112190e027987ab4335680 +Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1565555 +Reviewed-by: Tatiana Bradley +Run-TryBot: Roland Shoemaker +Reviewed-by: Roland Shoemaker +(cherry picked from commit 6ee768cef6b82adf7a90dcf367a1699ef694f3b2) +Reviewed-on: https://team-review.git.corp.google.com/c/golang/go-private/+/1590622 +Reviewed-by: Damien Neil +Reviewed-by: Julie Qiu +Reviewed-on: https://go-review.googlesource.com/c/go/+/438500 +Reviewed-by: Dmitri Shuralyov +Reviewed-by: Carlos Amedee +Reviewed-by: Dmitri Shuralyov +Run-TryBot: Carlos Amedee +TryBot-Result: Gopher Robot +--- + src/archive/tar/format.go | 4 +++ + src/archive/tar/reader.go | 20 ++++++++--- + src/archive/tar/reader_test.go | 11 +++++- + src/archive/tar/writer.go | 3 ++ + src/archive/tar/writer_test.go | 27 ++++++++++++++ + src/io/io.go | 64 ++++++++++++++++++++++++++++++++++ + 6 files changed, 123 insertions(+), 6 deletions(-) + +diff --git a/src/archive/tar/format.go b/src/archive/tar/format.go +index cfe24a5..6642364 100644 +--- a/src/archive/tar/format.go ++++ b/src/archive/tar/format.go +@@ -143,6 +143,10 @@ const ( + blockSize = 512 // Size of each block in a tar stream + nameSize = 100 // Max length of the name field in USTAR format + prefixSize = 155 // Max length of the prefix field in USTAR format ++ ++ // Max length of a special file (PAX header, GNU long name or link). ++ // This matches the limit used by libarchive. ++ maxSpecialFileSize = 1 << 20 + ) + + // blockPadding computes the number of bytes needed to pad offset up to the +diff --git a/src/archive/tar/reader.go b/src/archive/tar/reader.go +index 4f9135b..ec45ae3 100644 +--- a/src/archive/tar/reader.go ++++ b/src/archive/tar/reader.go +@@ -104,7 +104,7 @@ func (tr *Reader) next() (*Header, error) { + continue // This is a meta header affecting the next header + case TypeGNULongName, TypeGNULongLink: + format.mayOnlyBe(FormatGNU) +- realname, err := ioutil.ReadAll(tr) ++ realname, err := readSpecialFile(tr) + if err != nil { + return nil, err + } +@@ -294,7 +294,7 @@ func mergePAX(hdr *Header, paxHdrs map[string]string) (err error) { + // parsePAX parses PAX headers. + // If an extended header (type 'x') is invalid, ErrHeader is returned + func parsePAX(r io.Reader) (map[string]string, error) { +- buf, err := ioutil.ReadAll(r) ++ buf, err := readSpecialFile(r) + if err != nil { + return nil, err + } +@@ -337,9 +337,9 @@ func parsePAX(r io.Reader) (map[string]string, error) { + // header in case further processing is required. + // + // The err will be set to io.EOF only when one of the following occurs: +-// * Exactly 0 bytes are read and EOF is hit. +-// * Exactly 1 block of zeros is read and EOF is hit. +-// * At least 2 blocks of zeros are read. ++// - Exactly 0 bytes are read and EOF is hit. ++// - Exactly 1 block of zeros is read and EOF is hit. ++// - At least 2 blocks of zeros are read. + func (tr *Reader) readHeader() (*Header, *block, error) { + // Two blocks of zero bytes marks the end of the archive. + if _, err := io.ReadFull(tr.r, tr.blk[:]); err != nil { +@@ -827,6 +827,16 @@ func tryReadFull(r io.Reader, b []byte) (n int, err error) { + return n, err + } + ++// readSpecialFile is like io.ReadAll except it returns ++// ErrFieldTooLong if more than maxSpecialFileSize is read. 
++func readSpecialFile(r io.Reader) ([]byte, error) { ++ buf, err := io.ReadAll(io.LimitReader(r, maxSpecialFileSize+1)) ++ if len(buf) > maxSpecialFileSize { ++ return nil, ErrFieldTooLong ++ } ++ return buf, err ++} ++ + // discard skips n bytes in r, reporting an error if unable to do so. + func discard(r io.Reader, n int64) error { + // If possible, Seek to the last byte before the end of the data section. +diff --git a/src/archive/tar/reader_test.go b/src/archive/tar/reader_test.go +index f153b66..c68a859 100644 +--- a/src/archive/tar/reader_test.go ++++ b/src/archive/tar/reader_test.go +@@ -6,6 +6,7 @@ package tar + + import ( + "bytes" ++ "compress/bzip2" + "crypto/md5" + "errors" + "fmt" +@@ -244,6 +245,9 @@ func TestReader(t *testing.T) { + }, { + file: "testdata/pax-bad-hdr-file.tar", + err: ErrHeader, ++ }, { ++ file: "testdata/pax-bad-hdr-large.tar.bz2", ++ err: ErrFieldTooLong, + }, { + file: "testdata/pax-bad-mtime-file.tar", + err: ErrHeader, +@@ -626,9 +630,14 @@ func TestReader(t *testing.T) { + } + defer f.Close() + ++ var fr io.Reader = f ++ if strings.HasSuffix(v.file, ".bz2") { ++ fr = bzip2.NewReader(fr) ++ } ++ + // Capture all headers and checksums. + var ( +- tr = NewReader(f) ++ tr = NewReader(fr) + hdrs []*Header + chksums []string + rdbuf = make([]byte, 8) +diff --git a/src/archive/tar/writer.go b/src/archive/tar/writer.go +index e80498d..893eac0 100644 +--- a/src/archive/tar/writer.go ++++ b/src/archive/tar/writer.go +@@ -199,6 +199,9 @@ func (tw *Writer) writePAXHeader(hdr *Header, paxHdrs map[string]string) error { + flag = TypeXHeader + } + data := buf.String() ++ if len(data) > maxSpecialFileSize { ++ return ErrFieldTooLong ++ } + if err := tw.writeRawFile(name, data, flag, FormatPAX); err != nil || isGlobal { + return err // Global headers return here + } +diff --git a/src/archive/tar/writer_test.go b/src/archive/tar/writer_test.go +index 30556d2..4bd69fd 100644 +--- a/src/archive/tar/writer_test.go ++++ b/src/archive/tar/writer_test.go +@@ -1007,6 +1007,33 @@ func TestIssue12594(t *testing.T) { + } + } + ++func TestWriteLongHeader(t *testing.T) { ++ for _, test := range []struct { ++ name string ++ h *Header ++ }{{ ++ name: "name too long", ++ h: &Header{Name: strings.Repeat("a", maxSpecialFileSize)}, ++ }, { ++ name: "linkname too long", ++ h: &Header{Linkname: strings.Repeat("a", maxSpecialFileSize)}, ++ }, { ++ name: "uname too long", ++ h: &Header{Uname: strings.Repeat("a", maxSpecialFileSize)}, ++ }, { ++ name: "gname too long", ++ h: &Header{Gname: strings.Repeat("a", maxSpecialFileSize)}, ++ }, { ++ name: "PAX header too long", ++ h: &Header{PAXRecords: map[string]string{"GOLANG.x": strings.Repeat("a", maxSpecialFileSize)}}, ++ }} { ++ w := NewWriter(io.Discard) ++ if err := w.WriteHeader(test.h); err != ErrFieldTooLong { ++ t.Errorf("%v: w.WriteHeader() = %v, want ErrFieldTooLong", test.name, err) ++ } ++ } ++} ++ + // testNonEmptyWriter wraps an io.Writer and ensures that + // Write is never called with an empty buffer. + type testNonEmptyWriter struct{ io.Writer } +diff --git a/src/io/io.go b/src/io/io.go +index 3dea70b..f611ec9 100644 +--- a/src/io/io.go ++++ b/src/io/io.go +@@ -14,6 +14,7 @@ package io + + import ( + "errors" ++ "sync" + ) + + // Seek whence values. +@@ -547,3 +548,66 @@ func (t *teeReader) Read(p []byte) (n int, err error) { + } + return + } ++ ++// Discard is a Writer on which all Write calls succeed ++// without doing anything. 
++var Discard Writer = discard{} ++ ++type discard struct{} ++ ++// discard implements ReaderFrom as an optimization so Copy to ++// io.Discard can avoid doing unnecessary work. ++var _ ReaderFrom = discard{} ++ ++func (discard) Write(p []byte) (int, error) { ++ return len(p), nil ++} ++ ++func (discard) WriteString(s string) (int, error) { ++ return len(s), nil ++} ++ ++var blackHolePool = sync.Pool{ ++ New: func() interface{} { ++ b := make([]byte, 8192) ++ return &b ++ }, ++} ++ ++func (discard) ReadFrom(r Reader) (n int64, err error) { ++ bufp := blackHolePool.Get().(*[]byte) ++ readSize := 0 ++ for { ++ readSize, err = r.Read(*bufp) ++ n += int64(readSize) ++ if err != nil { ++ blackHolePool.Put(bufp) ++ if err == EOF { ++ return n, nil ++ } ++ return ++ } ++ } ++} ++ ++// ReadAll reads from r until an error or EOF and returns the data it read. ++// A successful call returns err == nil, not err == EOF. Because ReadAll is ++// defined to read from src until EOF, it does not treat an EOF from Read ++// as an error to be reported. ++func ReadAll(r Reader) ([]byte, error) { ++ b := make([]byte, 0, 512) ++ for { ++ if len(b) == cap(b) { ++ // Add more capacity (let append pick how much). ++ b = append(b, 0)[:len(b)] ++ } ++ n, err := r.Read(b[len(b):cap(b)]) ++ b = b[:len(b)+n] ++ if err != nil { ++ if err == EOF { ++ err = nil ++ } ++ return b, err ++ } ++ } ++} +-- +2.33.0 + diff --git a/0082-net-url-reject-query-values-with-semicolons.patch b/0082-net-url-reject-query-values-with-semicolons.patch new file mode 100644 index 0000000..5916a55 --- /dev/null +++ b/0082-net-url-reject-query-values-with-semicolons.patch @@ -0,0 +1,348 @@ +From 5149d86130fc0a57856f2dede41f0c3f14214ec3 Mon Sep 17 00:00:00 2001 +From: Katie Hockman +Date: Mon, 7 Jun 2021 14:29:43 -0400 +Subject: [PATCH] net/url: reject query values with semicolons + +Semicolons are no longer valid separators, so +net/url.ParseQuery will now return an error +if any part of the query contains a semicolon. + +net/http.(*Request).ParseMultipartForm has been +changed to fall through and continue parsing +even if the call to (*Request).ParseForm fails. + +This change also includes a few minor refactors +to existing tests. + +Fixes #25192 + +Change-Id: Iba3f108950fb99b9288e402c41fe71ca3a2ababd +Reviewed-on: https://go-review.googlesource.com/c/go/+/325697 +Trust: Katie Hockman +Run-TryBot: Katie Hockman +TryBot-Result: Go Bot +Reviewed-by: Filippo Valsorda +--- + src/net/http/request.go | 12 ++-- + src/net/http/request_test.go | 31 +++++++++- + src/net/http/server.go | 5 ++ + src/net/url/example_test.go | 4 +- + src/net/url/url.go | 13 ++-- + src/net/url/url_test.go | 116 +++++++++++++++++++++++++++-------- + 6 files changed, 145 insertions(+), 36 deletions(-) + +diff --git a/src/net/http/request.go b/src/net/http/request.go +index 54ec1c5593..bc28db95e3 100644 +--- a/src/net/http/request.go ++++ b/src/net/http/request.go +@@ -1273,16 +1273,18 @@ func (r *Request) ParseForm() error { + // its file parts are stored in memory, with the remainder stored on + // disk in temporary files. + // ParseMultipartForm calls ParseForm if necessary. ++// If ParseForm returns an error, ParseMultipartForm returns it but also ++// continues parsing the request body. + // After one call to ParseMultipartForm, subsequent calls have no effect. 
+ func (r *Request) ParseMultipartForm(maxMemory int64) error { + if r.MultipartForm == multipartByReader { + return errors.New("http: multipart handled by MultipartReader") + } ++ var parseFormErr error + if r.Form == nil { +- err := r.ParseForm() +- if err != nil { +- return err +- } ++ // Let errors in ParseForm fall through, and just ++ // return it at the end. ++ parseFormErr = r.ParseForm() + } + if r.MultipartForm != nil { + return nil +@@ -1309,7 +1311,7 @@ func (r *Request) ParseMultipartForm(maxMemory int64) error { + + r.MultipartForm = f + +- return nil ++ return parseFormErr + } + + // FormValue returns the first value for the named component of the query. +diff --git a/src/net/http/request_test.go b/src/net/http/request_test.go +index 461d66e05d..72316c4e21 100644 +--- a/src/net/http/request_test.go ++++ b/src/net/http/request_test.go +@@ -32,9 +32,26 @@ func TestQuery(t *testing.T) { + } + } + ++// Issue #25192: Test that ParseForm fails but still parses the form when an URL ++// containing a semicolon is provided. ++func TestParseFormSemicolonSeparator(t *testing.T) { ++ for _, method := range []string{"POST", "PATCH", "PUT", "GET"} { ++ req, _ := NewRequest(method, "http://www.google.com/search?q=foo;q=bar&a=1", ++ strings.NewReader("q")) ++ err := req.ParseForm() ++ if err == nil { ++ t.Fatalf(`for method %s, ParseForm expected an error, got success`, method) ++ } ++ wantForm := url.Values{"a": []string{"1"}} ++ if !reflect.DeepEqual(req.Form, wantForm) { ++ t.Fatalf("for method %s, ParseForm expected req.Form = %v, want %v", method, req.Form, wantForm) ++ } ++ } ++} ++ + func TestParseFormQuery(t *testing.T) { + req, _ := NewRequest("POST", "http://www.google.com/search?q=foo&q=bar&both=x&prio=1&orphan=nope&empty=not", +- strings.NewReader("z=post&both=y&prio=2&=nokey&orphan;empty=&")) ++ strings.NewReader("z=post&both=y&prio=2&=nokey&orphan&empty=&")) + req.Header.Set("Content-Type", "application/x-www-form-urlencoded; param=value") + + if q := req.FormValue("q"); q != "foo" { +@@ -298,6 +315,18 @@ func TestMultipartRequest(t *testing.T) { + validateTestMultipartContents(t, req, false) + } + ++// Issue #25192: Test that ParseMultipartForm fails but still parses the ++// multi-part form when an URL containing a semicolon is provided. ++func TestParseMultipartFormSemicolonSeparator(t *testing.T) { ++ req := newTestMultipartRequest(t) ++ req.URL = &url.URL{RawQuery: "q=foo;q=bar"} ++ if err := req.ParseMultipartForm(25); err == nil { ++ t.Fatal("ParseMultipartForm expected error due to invalid semicolon, got nil") ++ } ++ defer req.MultipartForm.RemoveAll() ++ validateTestMultipartContents(t, req, false) ++} ++ + func TestMultipartRequestAuto(t *testing.T) { + // Test that FormValue and FormFile automatically invoke + // ParseMultipartForm and return the right values. +diff --git a/src/net/http/server.go b/src/net/http/server.go +index 6948ff4dd0..d0c82a183c 100644 +--- a/src/net/http/server.go ++++ b/src/net/http/server.go +@@ -2857,6 +2857,11 @@ func (sh serverHandler) ServeHTTP(rw ResponseWriter, req *Request) { + handler = globalOptionsHandler{} + } + handler.ServeHTTP(rw, req) ++ if req.URL != nil && strings.Contains(req.URL.RawQuery, ";") { ++ // TODO(filippo): update this not to log if the special ++ // semicolon handler was called. 
++ sh.srv.logf("http: URL query contains semicolon, which is no longer a supported separator; parts of the query may be stripped when parsed; see golang.org/issue/25192") ++ } + } + + // ListenAndServe listens on the TCP network address srv.Addr and then +diff --git a/src/net/url/example_test.go b/src/net/url/example_test.go +index cb9e8922a2..476132a1c9 100644 +--- a/src/net/url/example_test.go ++++ b/src/net/url/example_test.go +@@ -72,13 +72,13 @@ func ExampleURL_ResolveReference() { + } + + func ExampleParseQuery() { +- m, err := url.ParseQuery(`x=1&y=2&y=3;z`) ++ m, err := url.ParseQuery(`x=1&y=2&y=3`) + if err != nil { + log.Fatal(err) + } + fmt.Println(toJSON(m)) + // Output: +- // {"x":["1"], "y":["2", "3"], "z":[""]} ++ // {"x":["1"], "y":["2", "3"]} + } + + func ExampleURL_EscapedPath() { +diff --git a/src/net/url/url.go b/src/net/url/url.go +index c93def0bd7..413236f843 100644 +--- a/src/net/url/url.go ++++ b/src/net/url/url.go +@@ -915,9 +915,10 @@ func (v Values) Del(key string) { + // valid query parameters found; err describes the first decoding error + // encountered, if any. + // +-// Query is expected to be a list of key=value settings separated by +-// ampersands or semicolons. A setting without an equals sign is +-// interpreted as a key set to an empty value. ++// Query is expected to be a list of key=value settings separated by ampersands. ++// A setting without an equals sign is interpreted as a key set to an empty ++// value. ++// Settings containing a non-URL-encoded semicolon are considered invalid. + func ParseQuery(query string) (Values, error) { + m := make(Values) + err := parseQuery(m, query) +@@ -927,11 +928,15 @@ func ParseQuery(query string) (Values, error) { + func parseQuery(m Values, query string) (err error) { + for query != "" { + key := query +- if i := strings.IndexAny(key, "&;"); i >= 0 { ++ if i := strings.IndexAny(key, "&"); i >= 0 { + key, query = key[:i], key[i+1:] + } else { + query = "" + } ++ if strings.Contains(key, ";") { ++ err = fmt.Errorf("invalid semicolon separator in query") ++ continue ++ } + if key == "" { + continue + } +diff --git a/src/net/url/url_test.go b/src/net/url/url_test.go +index 92b15afad4..8a4dd934a2 100644 +--- a/src/net/url/url_test.go ++++ b/src/net/url/url_test.go +@@ -1314,57 +1314,125 @@ func TestQueryValues(t *testing.T) { + type parseTest struct { + query string + out Values ++ ok bool + } + + var parseTests = []parseTest{ ++ { ++ query: "a=1", ++ out: Values{"a": []string{"1"}}, ++ ok: true, ++ }, + { + query: "a=1&b=2", + out: Values{"a": []string{"1"}, "b": []string{"2"}}, ++ ok: true, + }, + { + query: "a=1&a=2&a=banana", + out: Values{"a": []string{"1", "2", "banana"}}, ++ ok: true, + }, + { + query: "ascii=%3Ckey%3A+0x90%3E", + out: Values{"ascii": []string{""}}, ++ ok: true, ++ }, { ++ query: "a=1;b=2", ++ out: Values{}, ++ ok: false, ++ }, { ++ query: "a;b=1", ++ out: Values{}, ++ ok: false, ++ }, { ++ query: "a=%3B", // hex encoding for semicolon ++ out: Values{"a": []string{";"}}, ++ ok: true, + }, + { +- query: "a=1;b=2", +- out: Values{"a": []string{"1"}, "b": []string{"2"}}, ++ query: "a%3Bb=1", ++ out: Values{"a;b": []string{"1"}}, ++ ok: true, + }, + { + query: "a=1&a=2;a=banana", +- out: Values{"a": []string{"1", "2", "banana"}}, ++ out: Values{"a": []string{"1"}}, ++ ok: false, ++ }, ++ { ++ query: "a;b&c=1", ++ out: Values{"c": []string{"1"}}, ++ ok: false, ++ }, ++ { ++ query: "a=1&b=2;a=3&c=4", ++ out: Values{"a": []string{"1"}, "c": []string{"4"}}, ++ ok: false, ++ }, ++ { ++ query: 
"a=1&b=2;c=3", ++ out: Values{"a": []string{"1"}}, ++ ok: false, ++ }, ++ { ++ query: ";", ++ out: Values{}, ++ ok: false, ++ }, ++ { ++ query: "a=1;", ++ out: Values{}, ++ ok: false, ++ }, ++ { ++ query: "a=1&;", ++ out: Values{"a": []string{"1"}}, ++ ok: false, ++ }, ++ { ++ query: ";a=1&b=2", ++ out: Values{"b": []string{"2"}}, ++ ok: false, ++ }, ++ { ++ query: "a=1&b=2;", ++ out: Values{"a": []string{"1"}}, ++ ok: false, + }, + } + + func TestParseQuery(t *testing.T) { +- for i, test := range parseTests { +- form, err := ParseQuery(test.query) +- if err != nil { +- t.Errorf("test %d: Unexpected error: %v", i, err) +- continue +- } +- if len(form) != len(test.out) { +- t.Errorf("test %d: len(form) = %d, want %d", i, len(form), len(test.out)) +- } +- for k, evs := range test.out { +- vs, ok := form[k] +- if !ok { +- t.Errorf("test %d: Missing key %q", i, k) +- continue ++ for _, test := range parseTests { ++ t.Run(test.query, func(t *testing.T) { ++ form, err := ParseQuery(test.query) ++ if test.ok != (err == nil) { ++ want := "" ++ if test.ok { ++ want = "" ++ } ++ t.Errorf("Unexpected error: %v, want %v", err, want) + } +- if len(vs) != len(evs) { +- t.Errorf("test %d: len(form[%q]) = %d, want %d", i, k, len(vs), len(evs)) +- continue ++ if len(form) != len(test.out) { ++ t.Errorf("len(form) = %d, want %d", len(form), len(test.out)) + } +- for j, ev := range evs { +- if v := vs[j]; v != ev { +- t.Errorf("test %d: form[%q][%d] = %q, want %q", i, k, j, v, ev) ++ for k, evs := range test.out { ++ vs, ok := form[k] ++ if !ok { ++ t.Errorf("Missing key %q", k) ++ continue ++ } ++ if len(vs) != len(evs) { ++ t.Errorf("len(form[%q]) = %d, want %d", k, len(vs), len(evs)) ++ continue ++ } ++ for j, ev := range evs { ++ if v := vs[j]; v != ev { ++ t.Errorf("form[%q][%d] = %q, want %q", k, j, v, ev) ++ } + } + } +- } ++ }) + } + } + +-- +2.33.0 + diff --git a/golang.spec b/golang.spec index 82a4471..4dc6013 100644 --- a/golang.spec +++ b/golang.spec @@ -58,7 +58,7 @@ Name: golang Version: 1.15.7 -Release: 20 +Release: 21 Summary: The Go Programming Language License: BSD and Public Domain URL: https://golang.org/ @@ -220,6 +220,10 @@ Patch6075: 0075-path-filepath-do-not-remove-prefix-.-when-following-.patch Patch6076: 0076-release-branch.go1.17-syscall-check-correct-group-in.patch Patch6077: 0077-release-branch.go1.16-runtime-consistently-access-po.patch Patch6078: 0078-release-branch.go1.18-net-http-update-bundled-golang.patch +Patch6079: 0079-release-branch.go1.18-regexp-limit-size-of-parsed-re.patch +Patch6080: 0080-release-branch.go1.18-net-http-httputil-avoid-query-.patch +Patch6081: 0081-release-branch.go1.18-archive-tar-limit-size-of-head.patch +Patch6082: 0082-net-url-reject-query-values-with-semicolons.patch Patch9001: 0001-drop-hard-code-cert.patch Patch9002: 0002-fix-patch-cmd-go-internal-modfetch-do-not-sho.patch @@ -459,6 +463,12 @@ fi %files devel -f go-tests.list -f go-misc.list -f go-src.list %changelog +* Mon Oct 10 2022 hanchao - 1.15.7-20 +- Type:CVE +- CVE:CVE-2022-41715,CVE-2022-2880,CVE-2022-2879 +- SUG:NA +- DESC:fix CVE-2022-41715,CVE-2022-2880,CVE-2022-2879 + * Wed Oct 05 2022 wangshuo - 1.15.7-20 - Type:bugfix - CVE:NA -- Gitee