diff options
author | 2023-07-24 10:24:03 +0000 | |
---|---|---|
committer | 2023-07-24 10:24:03 +0000 | |
commit | 81fe59dadcc1351ee4a2f23b29cc889e288613da (patch) | |
tree | a4ee6e0bbcb4d0074f86acd0111c2999bfc81204 /vendor/github.com | |
parent | [chore]: Bump github.com/yuin/goldmark from 1.5.4 to 1.5.5 (#2023) (diff) | |
download | gotosocial-81fe59dadcc1351ee4a2f23b29cc889e288613da.tar.xz |
[chore]: Bump github.com/microcosm-cc/bluemonday from 1.0.24 to 1.0.25 (#2021)
Diffstat (limited to 'vendor/github.com')
-rw-r--r-- | vendor/github.com/microcosm-cc/bluemonday/helpers.go | 6 | ||||
-rw-r--r-- | vendor/github.com/microcosm-cc/bluemonday/policy.go | 40 | ||||
-rw-r--r-- | vendor/github.com/microcosm-cc/bluemonday/sanitize.go | 57 |
3 files changed, 55 insertions, 48 deletions
diff --git a/vendor/github.com/microcosm-cc/bluemonday/helpers.go b/vendor/github.com/microcosm-cc/bluemonday/helpers.go index 2b03d7e7d..aa0b7b92d 100644 --- a/vendor/github.com/microcosm-cc/bluemonday/helpers.go +++ b/vendor/github.com/microcosm-cc/bluemonday/helpers.go @@ -222,11 +222,7 @@ func (p *Policy) AllowDataURIImages() { } _, err := base64.StdEncoding.DecodeString(url.Opaque[len(matched):]) - if err != nil { - return false - } - - return true + return err == nil }, ) } diff --git a/vendor/github.com/microcosm-cc/bluemonday/policy.go b/vendor/github.com/microcosm-cc/bluemonday/policy.go index 995f46c2d..b4f09879a 100644 --- a/vendor/github.com/microcosm-cc/bluemonday/policy.go +++ b/vendor/github.com/microcosm-cc/bluemonday/policy.go @@ -118,9 +118,18 @@ type Policy struct { allowURLSchemes map[string][]urlPolicy // These regexps are used to match allowed URL schemes, for example - // if one would want to allow all URL schemes, they would add `.+` + // if one would want to allow all URL schemes, they would add `.+`. + // However pay attention as this can lead to XSS being rendered thus + // defeating the purpose of using a HTML sanitizer. + // The regexps are only considered if a schema was not explicitly + // handled by `AllowURLSchemes` or `AllowURLSchemeWithCustomPolicy`. allowURLSchemeRegexps []*regexp.Regexp + // If srcRewriter is not nil, it is used to rewrite the src attribute + // of tags that download resources, such as <img> and <script>. + // It requires that the URL is parsable by "net/url" url.Parse(). + srcRewriter urlRewriter + // If an element has had all attributes removed as a result of a policy // being applied, then the element would be removed from the output. 
// @@ -196,6 +205,8 @@ type stylePolicyBuilder struct { type urlPolicy func(url *url.URL) (allowUrl bool) +type urlRewriter func(*url.URL) + type SandboxValue int64 const ( @@ -575,6 +586,33 @@ func (p *Policy) AllowURLSchemesMatching(r *regexp.Regexp) *Policy { return p } +// RewriteSrc will rewrite the src attribute of a resource downloading tag +// (e.g. <img>, <script>, <iframe>) using the provided function. +// +// Typically the use case here is that if the content that we're sanitizing +// is untrusted then the content that is inlined is also untrusted. +// To prevent serving this content on the same domain as the content appears +// on it is good practice to proxy the content through an additional domain +// name as this will force the web client to consider the inline content as +// third party to the main content, thus providing browser isolation around +// the inline content. +// +// An example of this is a web mail provider like fastmail.com , when an +// email (user generated content) is displayed, the email text is shown on +// fastmail.com but the inline attachments and content are rendered from +// fastmailusercontent.com . This proxying of the external content on a +// domain that is different to the content domain forces the browser domain +// security model to kick in. Note that this only applies to differences +// below the suffix (as per the public suffix list). +// +// This is a good practice to adopt as it prevents the content from being +// able to set cookies on the main domain and thus prevents the content on +// the main domain from being able to read those cookies. 
+func (p *Policy) RewriteSrc(fn urlRewriter) *Policy { + p.srcRewriter = fn + return p +} + // RequireNoFollowOnLinks will result in all a, area, link tags having a // rel="nofollow" added to them if one does not already exist // diff --git a/vendor/github.com/microcosm-cc/bluemonday/sanitize.go b/vendor/github.com/microcosm-cc/bluemonday/sanitize.go index 9121aefb0..2792fb33b 100644 --- a/vendor/github.com/microcosm-cc/bluemonday/sanitize.go +++ b/vendor/github.com/microcosm-cc/bluemonday/sanitize.go @@ -95,41 +95,6 @@ func (p *Policy) SanitizeReaderToWriter(r io.Reader, w io.Writer) error { return p.sanitize(r, w) } -const escapedURLChars = "'<>\"\r" -func escapeUrlComponent(w stringWriterWriter, val string) error { - i := strings.IndexAny(val, escapedURLChars) - for i != -1 { - if _, err := w.WriteString(val[:i]); err != nil { - return err - } - var esc string - switch val[i] { - case '\'': - // "&#39;" is shorter than "&apos;" and apos was not in HTML until HTML5. - esc = "&#39;" - case '<': - esc = "&lt;" - case '>': - esc = "&gt;" - case '"': - // "&#34;" is shorter than "&quot;". 
- esc = "&#34;" - case '\r': - esc = "&#13;" - default: - panic("unrecognized escape character") - } - val = val[i+1:] - if _, err := w.WriteString(esc); err != nil { - return err - } - i = strings.IndexAny(val, escapedURLChars) - } - _, err := w.WriteString(val) - return err -} - // Query represents a single part of the query string, a query param type Query struct { Key string @@ -612,6 +577,14 @@ attrsLoop: case "audio", "embed", "iframe", "img", "script", "source", "track", "video": if htmlAttr.Key == "src" { if u, ok := p.validURL(htmlAttr.Val); ok { + if p.srcRewriter != nil { + parsedURL, err := url.Parse(u) + if err != nil { + fmt.Println(err) + } + p.srcRewriter(parsedURL) + u = parsedURL.String() + } htmlAttr.Val = u tmpAttrs = append(tmpAttrs, htmlAttr) } @@ -970,14 +943,14 @@ func (p *Policy) validURL(rawurl string) (string, bool) { } if u.Scheme != "" { - for _, r := range p.allowURLSchemeRegexps { - if r.MatchString(u.Scheme) { - return u.String(), true - } - } - urlPolicies, ok := p.allowURLSchemes[u.Scheme] if !ok { + for _, r := range p.allowURLSchemeRegexps { + if r.MatchString(u.Scheme) { + return u.String(), true + } + } + return "", false } @@ -986,7 +959,7 @@ func (p *Policy) validURL(rawurl string) (string, bool) { } for _, urlPolicy := range urlPolicies { - if urlPolicy(u) == true { + if urlPolicy(u) { return u.String(), true } } |