Diffstat (limited to 'vendor/github.com')
45 files changed, 1551 insertions, 615 deletions
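Among other things, this vendor bump takes gin-gonic/gin from v1.9.1 to v1.10.0 (see the version.go hunk below), which adds engine options (OptionFunc and Engine.With, accepted by New and Default) and the ShouldBindBodyWith* shortcuts. The following is a minimal usage sketch, not part of the patch: the route, struct, and listen address are illustrative only, and it assumes the vendored gin v1.10.0 API shown in the hunks.

package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

type loginForm struct {
	User string `json:"user"`
	Pass string `json:"pass"`
}

func main() {
	// An OptionFunc is just func(*gin.Engine); New/Default apply it via Engine.With.
	withMethodNotAllowed := func(e *gin.Engine) { e.HandleMethodNotAllowed = true }

	r := gin.Default(withMethodNotAllowed)

	r.POST("/login", func(c *gin.Context) {
		var form loginForm
		// ShouldBindBodyWithJSON is the new shortcut for
		// ShouldBindBodyWith(obj, binding.JSON); it caches the request body
		// so the same body can be bound again later in the chain.
		if err := c.ShouldBindBodyWithJSON(&form); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusOK, gin.H{"user": form.User})
	})

	_ = r.Run(":8080") // hypothetical listen address
}
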
diff --git a/vendor/github.com/KimMachineGun/automemlimit/memlimit/memlimit.go b/vendor/github.com/KimMachineGun/automemlimit/memlimit/memlimit.go index f7aaf071c..89404b3d2 100644 --- a/vendor/github.com/KimMachineGun/automemlimit/memlimit/memlimit.go +++ b/vendor/github.com/KimMachineGun/automemlimit/memlimit/memlimit.go @@ -167,7 +167,7 @@ func SetGoMemLimitWithOpts(opts ...Option) (_ int64, _err error) { limit, err := setGoMemLimit(ApplyRatio(cfg.provider, ratio)) if err != nil { if errors.Is(err, ErrNoLimit) { - cfg.logger.Info("memory is not limited, skipping: %v", err) + cfg.logger.Info("memory is not limited, skipping") return 0, nil } return 0, fmt.Errorf("failed to set GOMEMLIMIT: %w", err) diff --git a/vendor/github.com/gin-gonic/gin/.gitignore b/vendor/github.com/gin-gonic/gin/.gitignore index bdd50c95c..1ea0e2b95 100644 --- a/vendor/github.com/gin-gonic/gin/.gitignore +++ b/vendor/github.com/gin-gonic/gin/.gitignore @@ -5,3 +5,7 @@ count.out test profile.out tmp.out + +# Develop tools +.idea/ +.vscode/ diff --git a/vendor/github.com/gin-gonic/gin/.golangci.yml b/vendor/github.com/gin-gonic/gin/.golangci.yml index 91dae02c3..4a72f7342 100644 --- a/vendor/github.com/gin-gonic/gin/.golangci.yml +++ b/vendor/github.com/gin-gonic/gin/.golangci.yml @@ -3,7 +3,6 @@ run: linters: enable: - asciicheck - - depguard - dogsled - durationcheck - errcheck diff --git a/vendor/github.com/gin-gonic/gin/.goreleaser.yaml b/vendor/github.com/gin-gonic/gin/.goreleaser.yaml index e435e56aa..99b66fee7 100644 --- a/vendor/github.com/gin-gonic/gin/.goreleaser.yaml +++ b/vendor/github.com/gin-gonic/gin/.goreleaser.yaml @@ -1,8 +1,7 @@ project_name: gin builds: - - - # If true, skip the build. + - # If true, skip the build. # Useful for library projects. # Default is false skip: true @@ -10,7 +9,7 @@ builds: changelog: # Set it to true if you wish to skip the changelog generation. # This may result in an empty release notes on GitHub/GitLab/Gitea. - skip: false + disable: false # Changelog generation implementation to use. # @@ -21,7 +20,7 @@ changelog: # - `github-native`: uses the GitHub release notes generation API, disables the groups feature. # # Defaults to `git`. - use: git + use: github # Sorts the changelog by the commit's messages. 
# Could either be asc, desc or empty @@ -38,20 +37,20 @@ changelog: - title: Features regexp: "^.*feat[(\\w)]*:+.*$" order: 0 - - title: 'Bug fixes' + - title: "Bug fixes" regexp: "^.*fix[(\\w)]*:+.*$" order: 1 - - title: 'Enhancements' + - title: "Enhancements" regexp: "^.*chore[(\\w)]*:+.*$" order: 2 + - title: "Refactor" + regexp: "^.*refactor[(\\w)]*:+.*$" + order: 3 + - title: "Build process updates" + regexp: ^.*?(build|ci)(\(.+\))??!?:.+$ + order: 4 + - title: "Documentation updates" + regexp: ^.*?docs?(\(.+\))??!?:.+$ + order: 4 - title: Others order: 999 - - filters: - # Commit messages matching the regexp listed here will be removed from - # the changelog - # Default is empty - exclude: - - '^docs' - - 'CICD' - - typo diff --git a/vendor/github.com/gin-gonic/gin/Makefile b/vendor/github.com/gin-gonic/gin/Makefile index ebde4ee84..b58f24f32 100644 --- a/vendor/github.com/gin-gonic/gin/Makefile +++ b/vendor/github.com/gin-gonic/gin/Makefile @@ -42,6 +42,7 @@ fmt-check: exit 1; \ fi; +.PHONY: vet vet: $(GO) vet $(VETPACKAGES) diff --git a/vendor/github.com/gin-gonic/gin/auth.go b/vendor/github.com/gin-gonic/gin/auth.go index 2503c5156..5d3222d56 100644 --- a/vendor/github.com/gin-gonic/gin/auth.go +++ b/vendor/github.com/gin-gonic/gin/auth.go @@ -16,6 +16,9 @@ import ( // AuthUserKey is the cookie name for user credential in basic auth. const AuthUserKey = "user" +// AuthProxyUserKey is the cookie name for proxy_user credential in basic auth for proxy. +const AuthProxyUserKey = "proxy_user" + // Accounts defines a key/value for user/pass list of authorized logins. type Accounts map[string]string @@ -89,3 +92,25 @@ func authorizationHeader(user, password string) string { base := user + ":" + password return "Basic " + base64.StdEncoding.EncodeToString(bytesconv.StringToBytes(base)) } + +// BasicAuthForProxy returns a Basic HTTP Proxy-Authorization middleware. +// If the realm is empty, "Proxy Authorization Required" will be used by default. +func BasicAuthForProxy(accounts Accounts, realm string) HandlerFunc { + if realm == "" { + realm = "Proxy Authorization Required" + } + realm = "Basic realm=" + strconv.Quote(realm) + pairs := processAccounts(accounts) + return func(c *Context) { + proxyUser, found := pairs.searchCredential(c.requestHeader("Proxy-Authorization")) + if !found { + // Credentials doesn't match, we return 407 and abort handlers chain. + c.Header("Proxy-Authenticate", realm) + c.AbortWithStatus(http.StatusProxyAuthRequired) + return + } + // The proxy_user credentials was found, set proxy_user's id to key AuthProxyUserKey in this context, the proxy_user's id can be read later using + // c.MustGet(gin.AuthProxyUserKey). + c.Set(AuthProxyUserKey, proxyUser) + } +} diff --git a/vendor/github.com/gin-gonic/gin/binding/binding.go b/vendor/github.com/gin-gonic/gin/binding/binding.go index 40948529a..94723879a 100644 --- a/vendor/github.com/gin-gonic/gin/binding/binding.go +++ b/vendor/github.com/gin-gonic/gin/binding/binding.go @@ -21,6 +21,7 @@ const ( MIMEMSGPACK = "application/x-msgpack" MIMEMSGPACK2 = "application/msgpack" MIMEYAML = "application/x-yaml" + MIMEYAML2 = "application/yaml" MIMETOML = "application/toml" ) @@ -72,18 +73,18 @@ var Validator StructValidator = &defaultValidator{} // These implement the Binding interface and can be used to bind the data // present in the request to struct instances. 
var ( - JSON = jsonBinding{} - XML = xmlBinding{} - Form = formBinding{} - Query = queryBinding{} - FormPost = formPostBinding{} - FormMultipart = formMultipartBinding{} - ProtoBuf = protobufBinding{} - MsgPack = msgpackBinding{} - YAML = yamlBinding{} - Uri = uriBinding{} - Header = headerBinding{} - TOML = tomlBinding{} + JSON BindingBody = jsonBinding{} + XML BindingBody = xmlBinding{} + Form Binding = formBinding{} + Query Binding = queryBinding{} + FormPost Binding = formPostBinding{} + FormMultipart Binding = formMultipartBinding{} + ProtoBuf BindingBody = protobufBinding{} + MsgPack BindingBody = msgpackBinding{} + YAML BindingBody = yamlBinding{} + Uri BindingUri = uriBinding{} + Header Binding = headerBinding{} + TOML BindingBody = tomlBinding{} ) // Default returns the appropriate Binding instance based on the HTTP method @@ -102,7 +103,7 @@ func Default(method, contentType string) Binding { return ProtoBuf case MIMEMSGPACK, MIMEMSGPACK2: return MsgPack - case MIMEYAML: + case MIMEYAML, MIMEYAML2: return YAML case MIMETOML: return TOML diff --git a/vendor/github.com/gin-gonic/gin/binding/binding_nomsgpack.go b/vendor/github.com/gin-gonic/gin/binding/binding_nomsgpack.go index 93ad8ba30..552a86b2d 100644 --- a/vendor/github.com/gin-gonic/gin/binding/binding_nomsgpack.go +++ b/vendor/github.com/gin-gonic/gin/binding/binding_nomsgpack.go @@ -19,6 +19,7 @@ const ( MIMEMultipartPOSTForm = "multipart/form-data" MIMEPROTOBUF = "application/x-protobuf" MIMEYAML = "application/x-yaml" + MIMEYAML2 = "application/yaml" MIMETOML = "application/toml" ) @@ -96,7 +97,7 @@ func Default(method, contentType string) Binding { return XML case MIMEPROTOBUF: return ProtoBuf - case MIMEYAML: + case MIMEYAML, MIMEYAML2: return YAML case MIMEMultipartPOSTForm: return FormMultipart diff --git a/vendor/github.com/gin-gonic/gin/binding/default_validator.go b/vendor/github.com/gin-gonic/gin/binding/default_validator.go index e216b8546..ac43d7cc5 100644 --- a/vendor/github.com/gin-gonic/gin/binding/default_validator.go +++ b/vendor/github.com/gin-gonic/gin/binding/default_validator.go @@ -54,7 +54,10 @@ func (v *defaultValidator) ValidateStruct(obj any) error { value := reflect.ValueOf(obj) switch value.Kind() { case reflect.Ptr: - return v.ValidateStruct(value.Elem().Interface()) + if value.Elem().Kind() != reflect.Struct { + return v.ValidateStruct(value.Elem().Interface()) + } + return v.validateStruct(obj) case reflect.Struct: return v.validateStruct(obj) case reflect.Slice, reflect.Array: diff --git a/vendor/github.com/gin-gonic/gin/binding/form_mapping.go b/vendor/github.com/gin-gonic/gin/binding/form_mapping.go index 540bbbb84..db235e566 100644 --- a/vendor/github.com/gin-gonic/gin/binding/form_mapping.go +++ b/vendor/github.com/gin-gonic/gin/binding/form_mapping.go @@ -7,6 +7,7 @@ package binding import ( "errors" "fmt" + "mime/multipart" "reflect" "strconv" "strings" @@ -164,6 +165,23 @@ func tryToSetValue(value reflect.Value, field reflect.StructField, setter setter return setter.TrySet(value, field, tagValue, setOpt) } +// BindUnmarshaler is the interface used to wrap the UnmarshalParam method. +type BindUnmarshaler interface { + // UnmarshalParam decodes and assigns a value from an form or query param. + UnmarshalParam(param string) error +} + +// trySetCustom tries to set a custom type value +// If the value implements the BindUnmarshaler interface, it will be used to set the value, we will return `true` +// to skip the default value setting. 
+func trySetCustom(val string, value reflect.Value) (isSet bool, err error) { + switch v := value.Addr().Interface().(type) { + case BindUnmarshaler: + return true, v.UnmarshalParam(val) + } + return false, nil +} + func setByForm(value reflect.Value, field reflect.StructField, form map[string][]string, tagValue string, opt setOptions) (isSet bool, err error) { vs, ok := form[tagValue] if !ok && !opt.isDefaultExists { @@ -193,6 +211,9 @@ func setByForm(value reflect.Value, field reflect.StructField, form map[string][ if len(vs) > 0 { val = vs[0] } + if ok, err := trySetCustom(val, value); ok { + return ok, err + } return true, setWithProperType(val, value, field) } } @@ -235,10 +256,17 @@ func setWithProperType(val string, value reflect.Value, field reflect.StructFiel switch value.Interface().(type) { case time.Time: return setTimeField(val, field, value) + case multipart.FileHeader: + return nil } return json.Unmarshal(bytesconv.StringToBytes(val), value.Addr().Interface()) case reflect.Map: return json.Unmarshal(bytesconv.StringToBytes(val), value.Addr().Interface()) + case reflect.Ptr: + if !value.Elem().IsValid() { + value.Set(reflect.New(value.Type().Elem())) + } + return setWithProperType(val, value.Elem(), field) default: return errUnknownType } diff --git a/vendor/github.com/gin-gonic/gin/codecov.yml b/vendor/github.com/gin-gonic/gin/codecov.yml new file mode 100644 index 000000000..47782e50d --- /dev/null +++ b/vendor/github.com/gin-gonic/gin/codecov.yml @@ -0,0 +1,13 @@ +coverage: + require_ci_to_pass: true + + status: + project: + default: + target: 99% + threshold: 99% + + patch: + default: + target: 99% + threshold: 95%
\ No newline at end of file diff --git a/vendor/github.com/gin-gonic/gin/context.go b/vendor/github.com/gin-gonic/gin/context.go index 420ff1678..391adafed 100644 --- a/vendor/github.com/gin-gonic/gin/context.go +++ b/vendor/github.com/gin-gonic/gin/context.go @@ -43,6 +43,10 @@ const BodyBytesKey = "_gin-gonic/gin/bodybyteskey" // ContextKey is the key that a Context returns itself for. const ContextKey = "_gin-gonic/gin/contextkey" +type ContextKeyType int + +const ContextRequestKey ContextKeyType = 0 + // abortIndex represents a typical value used in abort functions. const abortIndex int8 = math.MaxInt8 >> 1 @@ -113,20 +117,27 @@ func (c *Context) Copy() *Context { cp := Context{ writermem: c.writermem, Request: c.Request, - Params: c.Params, engine: c.engine, } + cp.writermem.ResponseWriter = nil cp.Writer = &cp.writermem cp.index = abortIndex cp.handlers = nil - cp.Keys = map[string]any{} - for k, v := range c.Keys { + cp.fullPath = c.fullPath + + cKeys := c.Keys + cp.Keys = make(map[string]any, len(cKeys)) + c.mu.RLock() + for k, v := range cKeys { cp.Keys[k] = v } - paramCopy := make([]Param, len(cp.Params)) - copy(paramCopy, cp.Params) - cp.Params = paramCopy + c.mu.RUnlock() + + cParams := c.Params + cp.Params = make([]Param, len(cParams)) + copy(cp.Params, cParams) + return &cp } @@ -386,7 +397,7 @@ func (c *Context) GetStringMapStringSlice(key string) (smss map[string][]string) // // router.GET("/user/:id", func(c *gin.Context) { // // a GET request to /user/john -// id := c.Param("id") // id == "/john" +// id := c.Param("id") // id == "john" // // a GET request to /user/john/ // id := c.Param("id") // id == "/john/" // }) @@ -728,7 +739,7 @@ func (c *Context) ShouldBindHeader(obj any) error { // ShouldBindUri binds the passed struct pointer using the specified binding engine. func (c *Context) ShouldBindUri(obj any) error { - m := make(map[string][]string) + m := make(map[string][]string, len(c.Params)) for _, v := range c.Params { m[v.Key] = []string{v.Value} } @@ -763,6 +774,26 @@ func (c *Context) ShouldBindBodyWith(obj any, bb binding.BindingBody) (err error return bb.BindBody(body, obj) } +// ShouldBindBodyWithJSON is a shortcut for c.ShouldBindBodyWith(obj, binding.JSON). +func (c *Context) ShouldBindBodyWithJSON(obj any) error { + return c.ShouldBindBodyWith(obj, binding.JSON) +} + +// ShouldBindBodyWithXML is a shortcut for c.ShouldBindBodyWith(obj, binding.XML). +func (c *Context) ShouldBindBodyWithXML(obj any) error { + return c.ShouldBindBodyWith(obj, binding.XML) +} + +// ShouldBindBodyWithYAML is a shortcut for c.ShouldBindBodyWith(obj, binding.YAML). +func (c *Context) ShouldBindBodyWithYAML(obj any) error { + return c.ShouldBindBodyWith(obj, binding.YAML) +} + +// ShouldBindBodyWithTOML is a shortcut for c.ShouldBindBodyWith(obj, binding.TOML). +func (c *Context) ShouldBindBodyWithTOML(obj any) error { + return c.ShouldBindBodyWith(obj, binding.TOML) +} + // ClientIP implements one best effort algorithm to return the real client IP. // It calls c.RemoteIP() under the hood, to check if the remote IP is a trusted proxy or not. // If it is it will then try to parse the headers defined in Engine.RemoteIPHeaders (defaulting to [X-Forwarded-For, X-Real-Ip]). @@ -873,6 +904,9 @@ func (c *Context) GetHeader(key string) string { // GetRawData returns stream data. 
func (c *Context) GetRawData() ([]byte, error) { + if c.Request.Body == nil { + return nil, errors.New("cannot read nil body") + } return io.ReadAll(c.Request.Body) } @@ -1215,7 +1249,7 @@ func (c *Context) Err() error { // if no value is associated with key. Successive calls to Value with // the same key returns the same result. func (c *Context) Value(key any) any { - if key == 0 { + if key == ContextRequestKey { return c.Request } if key == ContextKey { diff --git a/vendor/github.com/gin-gonic/gin/debug.go b/vendor/github.com/gin-gonic/gin/debug.go index 1fc0cafe1..1761fe325 100644 --- a/vendor/github.com/gin-gonic/gin/debug.go +++ b/vendor/github.com/gin-gonic/gin/debug.go @@ -23,6 +23,9 @@ func IsDebugging() bool { // DebugPrintRouteFunc indicates debug log output format. var DebugPrintRouteFunc func(httpMethod, absolutePath, handlerName string, nuHandlers int) +// DebugPrintFunc indicates debug log output format. +var DebugPrintFunc func(format string, values ...interface{}) + func debugPrintRoute(httpMethod, absolutePath string, handlers HandlersChain) { if IsDebugging() { nuHandlers := len(handlers) @@ -48,12 +51,19 @@ func debugPrintLoadTemplate(tmpl *template.Template) { } func debugPrint(format string, values ...any) { - if IsDebugging() { - if !strings.HasSuffix(format, "\n") { - format += "\n" - } - fmt.Fprintf(DefaultWriter, "[GIN-debug] "+format, values...) + if !IsDebugging() { + return + } + + if DebugPrintFunc != nil { + DebugPrintFunc(format, values...) + return + } + + if !strings.HasSuffix(format, "\n") { + format += "\n" } + fmt.Fprintf(DefaultWriter, "[GIN-debug] "+format, values...) } func getMinVer(v string) (uint64, error) { diff --git a/vendor/github.com/gin-gonic/gin/deprecated.go b/vendor/github.com/gin-gonic/gin/deprecated.go index 9521308f1..b4c6cd88e 100644 --- a/vendor/github.com/gin-gonic/gin/deprecated.go +++ b/vendor/github.com/gin-gonic/gin/deprecated.go @@ -12,6 +12,8 @@ import ( // BindWith binds the passed struct pointer using the specified binding engine. // See the binding package. +// +// Deprecated: Use MustBindWith or ShouldBindWith. func (c *Context) BindWith(obj any, b binding.Binding) error { log.Println(`BindWith(\"any, binding.Binding\") error is going to be deprecated, please check issue #662 and either use MustBindWith() if you diff --git a/vendor/github.com/gin-gonic/gin/gin.go b/vendor/github.com/gin-gonic/gin/gin.go index ed8b6dad7..1633fe13f 100644 --- a/vendor/github.com/gin-gonic/gin/gin.go +++ b/vendor/github.com/gin-gonic/gin/gin.go @@ -47,6 +47,9 @@ var regRemoveRepeatedChar = regexp.MustCompile("/{2,}") // HandlerFunc defines the handler used by gin middleware as return value. type HandlerFunc func(*Context) +// OptionFunc defines the function to change the default configuration +type OptionFunc func(*Engine) + // HandlersChain defines a HandlerFunc slice. type HandlersChain []HandlerFunc @@ -77,6 +80,8 @@ const ( // PlatformCloudflare when using Cloudflare's CDN. Trust CF-Connecting-IP for determining // the client's IP PlatformCloudflare = "CF-Connecting-IP" + // PlatformFlyIO when running on Fly.io. Trust Fly-Client-IP for determining the client's IP + PlatformFlyIO = "Fly-Client-IP" ) // Engine is the framework's instance, it contains the muxer, middleware and configuration settings. 
@@ -180,7 +185,7 @@ var _ IRouter = (*Engine)(nil) // - ForwardedByClientIP: true // - UseRawPath: false // - UnescapePathValues: true -func New() *Engine { +func New(opts ...OptionFunc) *Engine { debugPrintWARNINGNew() engine := &Engine{ RouterGroup: RouterGroup{ @@ -209,15 +214,15 @@ func New() *Engine { engine.pool.New = func() any { return engine.allocateContext(engine.maxParams) } - return engine + return engine.With(opts...) } // Default returns an Engine instance with the Logger and Recovery middleware already attached. -func Default() *Engine { +func Default(opts ...OptionFunc) *Engine { debugPrintWARNINGDefault() engine := New() engine.Use(Logger(), Recovery()) - return engine + return engine.With(opts...) } func (engine *Engine) Handler() http.Handler { @@ -311,6 +316,15 @@ func (engine *Engine) Use(middleware ...HandlerFunc) IRoutes { return engine } +// With returns a new Engine instance with the provided options. +func (engine *Engine) With(opts ...OptionFunc) *Engine { + for _, opt := range opts { + opt(engine) + } + + return engine +} + func (engine *Engine) rebuild404Handlers() { engine.allNoRoute = engine.combineHandlers(engine.noRoute) } @@ -334,7 +348,6 @@ func (engine *Engine) addRoute(method, path string, handlers HandlersChain) { } root.addRoute(path, handlers) - // Update maxParams if paramsCount := countParams(path); paramsCount > engine.maxParams { engine.maxParams = paramsCount } @@ -634,17 +647,25 @@ func (engine *Engine) handleHTTPRequest(c *Context) { } if engine.HandleMethodNotAllowed { + // According to RFC 7231 section 6.5.5, MUST generate an Allow header field in response + // containing a list of the target resource's currently supported methods. + allowed := make([]string, 0, len(t)-1) for _, tree := range engine.trees { if tree.method == httpMethod { continue } if value := tree.root.getValue(rPath, nil, c.skippedNodes, unescape); value.handlers != nil { - c.handlers = engine.allNoMethod - serveError(c, http.StatusMethodNotAllowed, default405Body) - return + allowed = append(allowed, tree.method) } } + if len(allowed) > 0 { + c.handlers = engine.allNoMethod + c.writermem.Header().Set("Allow", strings.Join(allowed, ", ")) + serveError(c, http.StatusMethodNotAllowed, default405Body) + return + } } + c.handlers = engine.allNoRoute serveError(c, http.StatusNotFound, default404Body) } diff --git a/vendor/github.com/gin-gonic/gin/logger.go b/vendor/github.com/gin-gonic/gin/logger.go index cd1e7fa6e..db2c6832b 100644 --- a/vendor/github.com/gin-gonic/gin/logger.go +++ b/vendor/github.com/gin-gonic/gin/logger.go @@ -47,8 +47,15 @@ type LoggerConfig struct { // SkipPaths is an url path array which logs are not written. // Optional. SkipPaths []string + + // Skip is a Skipper that indicates which logs should not be written. + // Optional. 
+ Skip Skipper } +// Skipper is a function to skip logs based on provided Context +type Skipper func(c *Context) bool + // LogFormatter gives the signature of the formatter function passed to LoggerWithFormatter type LogFormatter func(params LogFormatterParams) string @@ -83,6 +90,8 @@ func (p *LogFormatterParams) StatusCodeColor() string { code := p.StatusCode switch { + case code >= http.StatusContinue && code < http.StatusOK: + return white case code >= http.StatusOK && code < http.StatusMultipleChoices: return green case code >= http.StatusMultipleChoices && code < http.StatusBadRequest: @@ -239,32 +248,34 @@ func LoggerWithConfig(conf LoggerConfig) HandlerFunc { // Process request c.Next() - // Log only when path is not being skipped - if _, ok := skip[path]; !ok { - param := LogFormatterParams{ - Request: c.Request, - isTerm: isTerm, - Keys: c.Keys, - } - - // Stop timer - param.TimeStamp = time.Now() - param.Latency = param.TimeStamp.Sub(start) + // Log only when it is not being skipped + if _, ok := skip[path]; ok || (conf.Skip != nil && conf.Skip(c)) { + return + } - param.ClientIP = c.ClientIP() - param.Method = c.Request.Method - param.StatusCode = c.Writer.Status() - param.ErrorMessage = c.Errors.ByType(ErrorTypePrivate).String() + param := LogFormatterParams{ + Request: c.Request, + isTerm: isTerm, + Keys: c.Keys, + } - param.BodySize = c.Writer.Size() + // Stop timer + param.TimeStamp = time.Now() + param.Latency = param.TimeStamp.Sub(start) - if raw != "" { - path = path + "?" + raw - } + param.ClientIP = c.ClientIP() + param.Method = c.Request.Method + param.StatusCode = c.Writer.Status() + param.ErrorMessage = c.Errors.ByType(ErrorTypePrivate).String() - param.Path = path + param.BodySize = c.Writer.Size() - fmt.Fprint(out, formatter(param)) + if raw != "" { + path = path + "?" 
+ raw } + + param.Path = path + + fmt.Fprint(out, formatter(param)) } } diff --git a/vendor/github.com/gin-gonic/gin/render/render.go b/vendor/github.com/gin-gonic/gin/render/render.go index 7955000c6..4bdcfa232 100644 --- a/vendor/github.com/gin-gonic/gin/render/render.go +++ b/vendor/github.com/gin-gonic/gin/render/render.go @@ -15,22 +15,22 @@ type Render interface { } var ( - _ Render = JSON{} - _ Render = IndentedJSON{} - _ Render = SecureJSON{} - _ Render = JsonpJSON{} - _ Render = XML{} - _ Render = String{} - _ Render = Redirect{} - _ Render = Data{} - _ Render = HTML{} - _ HTMLRender = HTMLDebug{} - _ HTMLRender = HTMLProduction{} - _ Render = YAML{} - _ Render = Reader{} - _ Render = AsciiJSON{} - _ Render = ProtoBuf{} - _ Render = TOML{} + _ Render = (*JSON)(nil) + _ Render = (*IndentedJSON)(nil) + _ Render = (*SecureJSON)(nil) + _ Render = (*JsonpJSON)(nil) + _ Render = (*XML)(nil) + _ Render = (*String)(nil) + _ Render = (*Redirect)(nil) + _ Render = (*Data)(nil) + _ Render = (*HTML)(nil) + _ HTMLRender = (*HTMLDebug)(nil) + _ HTMLRender = (*HTMLProduction)(nil) + _ Render = (*YAML)(nil) + _ Render = (*Reader)(nil) + _ Render = (*AsciiJSON)(nil) + _ Render = (*ProtoBuf)(nil) + _ Render = (*TOML)(nil) ) func writeContentType(w http.ResponseWriter, value []string) { diff --git a/vendor/github.com/gin-gonic/gin/render/yaml.go b/vendor/github.com/gin-gonic/gin/render/yaml.go index fc927c1f2..042bb821d 100644 --- a/vendor/github.com/gin-gonic/gin/render/yaml.go +++ b/vendor/github.com/gin-gonic/gin/render/yaml.go @@ -15,7 +15,7 @@ type YAML struct { Data any } -var yamlContentType = []string{"application/x-yaml; charset=utf-8"} +var yamlContentType = []string{"application/yaml; charset=utf-8"} // Render (YAML) marshals the given interface object and writes data with custom ContentType. func (r YAML) Render(w http.ResponseWriter) error { diff --git a/vendor/github.com/gin-gonic/gin/tree.go b/vendor/github.com/gin-gonic/gin/tree.go index dda8f4f7b..878023d1c 100644 --- a/vendor/github.com/gin-gonic/gin/tree.go +++ b/vendor/github.com/gin-gonic/gin/tree.go @@ -351,7 +351,10 @@ func (n *node) insertChild(path string, fullPath string, handlers HandlersChain) } if len(n.path) > 0 && n.path[len(n.path)-1] == '/' { - pathSeg := strings.SplitN(n.children[0].path, "/", 2)[0] + pathSeg := "" + if len(n.children) != 0 { + pathSeg = strings.SplitN(n.children[0].path, "/", 2)[0] + } panic("catch-all wildcard '" + path + "' in new path '" + fullPath + "' conflicts with existing path segment '" + pathSeg + @@ -478,7 +481,7 @@ walk: // Outer loop for walking the tree // We can recommend to redirect to the same URL without a // trailing slash if a leaf exists for that path. value.tsr = path == "/" && n.handlers != nil - return + return value } // Handle wildcard child, which is always at the end of the array @@ -497,7 +500,14 @@ walk: // Outer loop for walking the tree } // Save param value - if params != nil && cap(*params) > 0 { + if params != nil { + // Preallocate capacity if necessary + if cap(*params) < int(globalParamsCount) { + newParams := make(Params, len(*params), globalParamsCount) + copy(newParams, *params) + *params = newParams + } + if value.params == nil { value.params = params } @@ -526,12 +536,12 @@ walk: // Outer loop for walking the tree // ... but we can't value.tsr = len(path) == end+1 - return + return value } if value.handlers = n.handlers; value.handlers != nil { value.fullPath = n.fullPath - return + return value } if len(n.children) == 1 { // No handle found. 
Check if a handle for this path + a @@ -539,11 +549,18 @@ walk: // Outer loop for walking the tree n = n.children[0] value.tsr = (n.path == "/" && n.handlers != nil) || (n.path == "" && n.indices == "/") } - return + return value case catchAll: // Save param value if params != nil { + // Preallocate capacity if necessary + if cap(*params) < int(globalParamsCount) { + newParams := make(Params, len(*params), globalParamsCount) + copy(newParams, *params) + *params = newParams + } + if value.params == nil { value.params = params } @@ -564,7 +581,7 @@ walk: // Outer loop for walking the tree value.handlers = n.handlers value.fullPath = n.fullPath - return + return value default: panic("invalid node type") @@ -595,7 +612,7 @@ walk: // Outer loop for walking the tree // Check if this node has a handle registered. if value.handlers = n.handlers; value.handlers != nil { value.fullPath = n.fullPath - return + return value } // If there is no handle for this route, but this route has a @@ -603,12 +620,12 @@ walk: // Outer loop for walking the tree // additional trailing slash if path == "/" && n.wildChild && n.nType != root { value.tsr = true - return + return value } if path == "/" && n.nType == static { value.tsr = true - return + return value } // No handle found. Check if a handle for this path + a @@ -618,11 +635,11 @@ walk: // Outer loop for walking the tree n = n.children[i] value.tsr = (len(n.path) == 1 && n.handlers != nil) || (n.nType == catchAll && n.children[0].handlers != nil) - return + return value } } - return + return value } // Nothing found. We can recommend to redirect to the same URL with an @@ -648,7 +665,7 @@ walk: // Outer loop for walking the tree } } - return + return value } } diff --git a/vendor/github.com/gin-gonic/gin/version.go b/vendor/github.com/gin-gonic/gin/version.go index 85462e555..93ad96541 100644 --- a/vendor/github.com/gin-gonic/gin/version.go +++ b/vendor/github.com/gin-gonic/gin/version.go @@ -5,4 +5,4 @@ package gin // Version is the current gin framework's version. -const Version = "v1.9.1" +const Version = "v1.10.0" diff --git a/vendor/github.com/go-swagger/go-swagger/generator/operation.go b/vendor/github.com/go-swagger/go-swagger/generator/operation.go index e606e5143..07ff8a230 100644 --- a/vendor/github.com/go-swagger/go-swagger/generator/operation.go +++ b/vendor/github.com/go-swagger/go-swagger/generator/operation.go @@ -788,7 +788,7 @@ func (b *codeGenOpBuilder) MakeParameter(receiver string, resolver *typeResolver return GenParameter{}, err } res.Child = &pi - // Propagates HasValidations from from child array + // Propagates HasValidations from child array hasChildValidations = pi.HasValidations } res.IsNullable = !param.Required && !param.AllowEmptyValue diff --git a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/LICENSE b/vendor/github.com/matttproud/golang_protobuf_extensions/v2/LICENSE deleted file mode 100644 index 8dada3eda..000000000 --- a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/NOTICE b/vendor/github.com/matttproud/golang_protobuf_extensions/v2/NOTICE deleted file mode 100644 index 5d8cb5b72..000000000 --- a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/NOTICE +++ /dev/null @@ -1 +0,0 @@ -Copyright 2012 Matt T. Proud (matt.proud@gmail.com) diff --git a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/.gitignore b/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/.gitignore deleted file mode 100644 index e16fb946b..000000000 --- a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/.gitignore +++ /dev/null @@ -1 +0,0 @@ -cover.dat diff --git a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/Makefile b/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/Makefile deleted file mode 100644 index 81be21437..000000000 --- a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/Makefile +++ /dev/null @@ -1,7 +0,0 @@ -all: - -cover: - go test -cover -v -coverprofile=cover.dat ./... - go tool cover -func cover.dat - -.PHONY: cover diff --git a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/decode.go b/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/decode.go deleted file mode 100644 index 7c08e564f..000000000 --- a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/decode.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2013 Matt T. Proud -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pbutil - -import ( - "encoding/binary" - "errors" - "io" - - "google.golang.org/protobuf/proto" -) - -// TODO: Give error package name prefix in next minor release. -var errInvalidVarint = errors.New("invalid varint32 encountered") - -// ReadDelimited decodes a message from the provided length-delimited stream, -// where the length is encoded as 32-bit varint prefix to the message body. -// It returns the total number of bytes read and any applicable error. This is -// roughly equivalent to the companion Java API's -// MessageLite#parseDelimitedFrom. As per the reader contract, this function -// calls r.Read repeatedly as required until exactly one message including its -// prefix is read and decoded (or an error has occurred). The function never -// reads more bytes from the stream than required. The function never returns -// an error if a message has been read and decoded correctly, even if the end -// of the stream has been reached in doing so. In that case, any subsequent -// calls return (0, io.EOF). -func ReadDelimited(r io.Reader, m proto.Message) (n int, err error) { - // TODO: Consider allowing the caller to specify a decode buffer in the - // next major version. - - // TODO: Consider using error wrapping to annotate error state in pass- - // through cases in the next minor version. - - // Per AbstractParser#parsePartialDelimitedFrom with - // CodedInputStream#readRawVarint32. 
- var headerBuf [binary.MaxVarintLen32]byte - var bytesRead, varIntBytes int - var messageLength uint64 - for varIntBytes == 0 { // i.e. no varint has been decoded yet. - if bytesRead >= len(headerBuf) { - return bytesRead, errInvalidVarint - } - // We have to read byte by byte here to avoid reading more bytes - // than required. Each read byte is appended to what we have - // read before. - newBytesRead, err := r.Read(headerBuf[bytesRead : bytesRead+1]) - if newBytesRead == 0 { - if err != nil { - return bytesRead, err - } - // A Reader should not return (0, nil); but if it does, it should - // be treated as no-op according to the Reader contract. - continue - } - bytesRead += newBytesRead - // Now present everything read so far to the varint decoder and - // see if a varint can be decoded already. - messageLength, varIntBytes = binary.Uvarint(headerBuf[:bytesRead]) - } - - messageBuf := make([]byte, messageLength) - newBytesRead, err := io.ReadFull(r, messageBuf) - bytesRead += newBytesRead - if err != nil { - return bytesRead, err - } - - return bytesRead, proto.Unmarshal(messageBuf, m) -} diff --git a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/doc.go b/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/doc.go deleted file mode 100644 index c318385cb..000000000 --- a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/doc.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2013 Matt T. Proud -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package pbutil provides record length-delimited Protocol Buffer streaming. -package pbutil diff --git a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/encode.go b/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/encode.go deleted file mode 100644 index e58dd9d29..000000000 --- a/vendor/github.com/matttproud/golang_protobuf_extensions/v2/pbutil/encode.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2013 Matt T. Proud -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pbutil - -import ( - "encoding/binary" - "io" - - "google.golang.org/protobuf/proto" -) - -// WriteDelimited encodes and dumps a message to the provided writer prefixed -// with a 32-bit varint indicating the length of the encoded message, producing -// a length-delimited record stream, which can be used to chain together -// encoded messages of the same type together in a file. It returns the total -// number of bytes written and any applicable error. 
This is roughly -// equivalent to the companion Java API's MessageLite#writeDelimitedTo. -func WriteDelimited(w io.Writer, m proto.Message) (n int, err error) { - // TODO: Consider allowing the caller to specify an encode buffer in the - // next major version. - - buffer, err := proto.Marshal(m) - if err != nil { - return 0, err - } - - var buf [binary.MaxVarintLen32]byte - encodedLength := binary.PutUvarint(buf[:], uint64(len(buffer))) - - sync, err := w.Write(buf[:encodedLength]) - if err != nil { - return sync, err - } - - n, err = w.Write(buffer) - return n + sync, err -} diff --git a/vendor/github.com/prometheus/common/expfmt/decode.go b/vendor/github.com/prometheus/common/expfmt/decode.go index 0ca86a3dc..b2b89b017 100644 --- a/vendor/github.com/prometheus/common/expfmt/decode.go +++ b/vendor/github.com/prometheus/common/expfmt/decode.go @@ -14,6 +14,7 @@ package expfmt import ( + "bufio" "fmt" "io" "math" @@ -21,8 +22,8 @@ import ( "net/http" dto "github.com/prometheus/client_model/go" + "google.golang.org/protobuf/encoding/protodelim" - "github.com/matttproud/golang_protobuf_extensions/v2/pbutil" "github.com/prometheus/common/model" ) @@ -44,7 +45,7 @@ func ResponseFormat(h http.Header) Format { mediatype, params, err := mime.ParseMediaType(ct) if err != nil { - return FmtUnknown + return fmtUnknown } const textType = "text/plain" @@ -52,28 +53,28 @@ func ResponseFormat(h http.Header) Format { switch mediatype { case ProtoType: if p, ok := params["proto"]; ok && p != ProtoProtocol { - return FmtUnknown + return fmtUnknown } if e, ok := params["encoding"]; ok && e != "delimited" { - return FmtUnknown + return fmtUnknown } - return FmtProtoDelim + return fmtProtoDelim case textType: if v, ok := params["version"]; ok && v != TextVersion { - return FmtUnknown + return fmtUnknown } - return FmtText + return fmtText } - return FmtUnknown + return fmtUnknown } // NewDecoder returns a new decoder based on the given input format. // If the input format does not imply otherwise, a text format decoder is returned. func NewDecoder(r io.Reader, format Format) Decoder { - switch format { - case FmtProtoDelim: + switch format.FormatType() { + case TypeProtoDelim: return &protoDecoder{r: r} } return &textDecoder{r: r} @@ -86,8 +87,10 @@ type protoDecoder struct { // Decode implements the Decoder interface. func (d *protoDecoder) Decode(v *dto.MetricFamily) error { - _, err := pbutil.ReadDelimited(d.r, v) - if err != nil { + opts := protodelim.UnmarshalOptions{ + MaxSize: -1, + } + if err := opts.UnmarshalFrom(bufio.NewReader(d.r), v); err != nil { return err } if !model.IsValidMetricName(model.LabelValue(v.GetName())) { diff --git a/vendor/github.com/prometheus/common/expfmt/encode.go b/vendor/github.com/prometheus/common/expfmt/encode.go index ca2140600..8fd806184 100644 --- a/vendor/github.com/prometheus/common/expfmt/encode.go +++ b/vendor/github.com/prometheus/common/expfmt/encode.go @@ -18,10 +18,12 @@ import ( "io" "net/http" - "github.com/matttproud/golang_protobuf_extensions/v2/pbutil" - "github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg" + "google.golang.org/protobuf/encoding/protodelim" "google.golang.org/protobuf/encoding/prototext" + "github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg" + "github.com/prometheus/common/model" + dto "github.com/prometheus/client_model/go" ) @@ -60,23 +62,32 @@ func (ec encoderCloser) Close() error { // as the support is still experimental. To include the option to negotiate // FmtOpenMetrics, use NegotiateOpenMetrics. 
func Negotiate(h http.Header) Format { + escapingScheme := Format(fmt.Sprintf("; escaping=%s", Format(model.NameEscapingScheme.String()))) for _, ac := range goautoneg.ParseAccept(h.Get(hdrAccept)) { + if escapeParam := ac.Params[model.EscapingKey]; escapeParam != "" { + switch Format(escapeParam) { + case model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues: + escapingScheme = Format(fmt.Sprintf("; escaping=%s", escapeParam)) + default: + // If the escaping parameter is unknown, ignore it. + } + } ver := ac.Params["version"] if ac.Type+"/"+ac.SubType == ProtoType && ac.Params["proto"] == ProtoProtocol { switch ac.Params["encoding"] { case "delimited": - return FmtProtoDelim + return fmtProtoDelim + escapingScheme case "text": - return FmtProtoText + return fmtProtoText + escapingScheme case "compact-text": - return FmtProtoCompact + return fmtProtoCompact + escapingScheme } } if ac.Type == "text" && ac.SubType == "plain" && (ver == TextVersion || ver == "") { - return FmtText + return fmtText + escapingScheme } } - return FmtText + return fmtText + escapingScheme } // NegotiateIncludingOpenMetrics works like Negotiate but includes @@ -84,29 +95,40 @@ func Negotiate(h http.Header) Format { // temporary and will disappear once FmtOpenMetrics is fully supported and as // such may be negotiated by the normal Negotiate function. func NegotiateIncludingOpenMetrics(h http.Header) Format { + escapingScheme := Format(fmt.Sprintf("; escaping=%s", Format(model.NameEscapingScheme.String()))) for _, ac := range goautoneg.ParseAccept(h.Get(hdrAccept)) { + if escapeParam := ac.Params[model.EscapingKey]; escapeParam != "" { + switch Format(escapeParam) { + case model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues: + escapingScheme = Format(fmt.Sprintf("; escaping=%s", escapeParam)) + default: + // If the escaping parameter is unknown, ignore it. + } + } ver := ac.Params["version"] if ac.Type+"/"+ac.SubType == ProtoType && ac.Params["proto"] == ProtoProtocol { switch ac.Params["encoding"] { case "delimited": - return FmtProtoDelim + return fmtProtoDelim + escapingScheme case "text": - return FmtProtoText + return fmtProtoText + escapingScheme case "compact-text": - return FmtProtoCompact + return fmtProtoCompact + escapingScheme } } if ac.Type == "text" && ac.SubType == "plain" && (ver == TextVersion || ver == "") { - return FmtText + return fmtText + escapingScheme } if ac.Type+"/"+ac.SubType == OpenMetricsType && (ver == OpenMetricsVersion_0_0_1 || ver == OpenMetricsVersion_1_0_0 || ver == "") { - if ver == OpenMetricsVersion_1_0_0 { - return FmtOpenMetrics_1_0_0 + switch ver { + case OpenMetricsVersion_1_0_0: + return fmtOpenMetrics_1_0_0 + escapingScheme + default: + return fmtOpenMetrics_0_0_1 + escapingScheme } - return FmtOpenMetrics_0_0_1 } } - return FmtText + return fmtText + escapingScheme } // NewEncoder returns a new encoder based on content type negotiation. All @@ -115,44 +137,48 @@ func NegotiateIncludingOpenMetrics(h http.Header) Format { // for FmtOpenMetrics, but a future (breaking) release will add the Close method // to the Encoder interface directly. The current version of the Encoder // interface is kept for backwards compatibility. +// In cases where the Format does not allow for UTF-8 names, the global +// NameEscapingScheme will be applied. 
func NewEncoder(w io.Writer, format Format) Encoder { - switch format { - case FmtProtoDelim: + escapingScheme := format.ToEscapingScheme() + + switch format.FormatType() { + case TypeProtoDelim: return encoderCloser{ encode: func(v *dto.MetricFamily) error { - _, err := pbutil.WriteDelimited(w, v) + _, err := protodelim.MarshalTo(w, v) return err }, close: func() error { return nil }, } - case FmtProtoCompact: + case TypeProtoCompact: return encoderCloser{ encode: func(v *dto.MetricFamily) error { - _, err := fmt.Fprintln(w, v.String()) + _, err := fmt.Fprintln(w, model.EscapeMetricFamily(v, escapingScheme).String()) return err }, close: func() error { return nil }, } - case FmtProtoText: + case TypeProtoText: return encoderCloser{ encode: func(v *dto.MetricFamily) error { - _, err := fmt.Fprintln(w, prototext.Format(v)) + _, err := fmt.Fprintln(w, prototext.Format(model.EscapeMetricFamily(v, escapingScheme))) return err }, close: func() error { return nil }, } - case FmtText: + case TypeTextPlain: return encoderCloser{ encode: func(v *dto.MetricFamily) error { - _, err := MetricFamilyToText(w, v) + _, err := MetricFamilyToText(w, model.EscapeMetricFamily(v, escapingScheme)) return err }, close: func() error { return nil }, } - case FmtOpenMetrics_0_0_1, FmtOpenMetrics_1_0_0: + case TypeOpenMetrics: return encoderCloser{ encode: func(v *dto.MetricFamily) error { - _, err := MetricFamilyToOpenMetrics(w, v) + _, err := MetricFamilyToOpenMetrics(w, model.EscapeMetricFamily(v, escapingScheme)) return err }, close: func() error { diff --git a/vendor/github.com/prometheus/common/expfmt/expfmt.go b/vendor/github.com/prometheus/common/expfmt/expfmt.go index c4cb20f0d..6fc9555e3 100644 --- a/vendor/github.com/prometheus/common/expfmt/expfmt.go +++ b/vendor/github.com/prometheus/common/expfmt/expfmt.go @@ -14,30 +14,154 @@ // Package expfmt contains tools for reading and writing Prometheus metrics. package expfmt +import ( + "strings" + + "github.com/prometheus/common/model" +) + // Format specifies the HTTP content type of the different wire protocols. type Format string -// Constants to assemble the Content-Type values for the different wire protocols. +// Constants to assemble the Content-Type values for the different wire +// protocols. The Content-Type strings here are all for the legacy exposition +// formats, where valid characters for metric names and label names are limited. +// Support for arbitrary UTF-8 characters in those names is already partially +// implemented in this module (see model.ValidationScheme), but to actually use +// it on the wire, new content-type strings will have to be agreed upon and +// added here. const ( TextVersion = "0.0.4" ProtoType = `application/vnd.google.protobuf` ProtoProtocol = `io.prometheus.client.MetricFamily` - ProtoFmt = ProtoType + "; proto=" + ProtoProtocol + ";" + protoFmt = ProtoType + "; proto=" + ProtoProtocol + ";" OpenMetricsType = `application/openmetrics-text` OpenMetricsVersion_0_0_1 = "0.0.1" OpenMetricsVersion_1_0_0 = "1.0.0" - // The Content-Type values for the different wire protocols. 
- FmtUnknown Format = `<unknown>` - FmtText Format = `text/plain; version=` + TextVersion + `; charset=utf-8` - FmtProtoDelim Format = ProtoFmt + ` encoding=delimited` - FmtProtoText Format = ProtoFmt + ` encoding=text` - FmtProtoCompact Format = ProtoFmt + ` encoding=compact-text` - FmtOpenMetrics_1_0_0 Format = OpenMetricsType + `; version=` + OpenMetricsVersion_1_0_0 + `; charset=utf-8` - FmtOpenMetrics_0_0_1 Format = OpenMetricsType + `; version=` + OpenMetricsVersion_0_0_1 + `; charset=utf-8` + // The Content-Type values for the different wire protocols. Note that these + // values are now unexported. If code was relying on comparisons to these + // constants, instead use FormatType(). + fmtUnknown Format = `<unknown>` + fmtText Format = `text/plain; version=` + TextVersion + `; charset=utf-8` + fmtProtoDelim Format = protoFmt + ` encoding=delimited` + fmtProtoText Format = protoFmt + ` encoding=text` + fmtProtoCompact Format = protoFmt + ` encoding=compact-text` + fmtOpenMetrics_1_0_0 Format = OpenMetricsType + `; version=` + OpenMetricsVersion_1_0_0 + `; charset=utf-8` + fmtOpenMetrics_0_0_1 Format = OpenMetricsType + `; version=` + OpenMetricsVersion_0_0_1 + `; charset=utf-8` ) const ( hdrContentType = "Content-Type" hdrAccept = "Accept" ) + +// FormatType is a Go enum representing the overall category for the given +// Format. As the number of Format permutations increases, doing basic string +// comparisons are not feasible, so this enum captures the most useful +// high-level attribute of the Format string. +type FormatType int + +const ( + TypeUnknown = iota + TypeProtoCompact + TypeProtoDelim + TypeProtoText + TypeTextPlain + TypeOpenMetrics +) + +// NewFormat generates a new Format from the type provided. Mostly used for +// tests, most Formats should be generated as part of content negotiation in +// encode.go. +func NewFormat(t FormatType) Format { + switch t { + case TypeProtoCompact: + return fmtProtoCompact + case TypeProtoDelim: + return fmtProtoDelim + case TypeProtoText: + return fmtProtoText + case TypeTextPlain: + return fmtText + case TypeOpenMetrics: + return fmtOpenMetrics_1_0_0 + default: + return fmtUnknown + } +} + +// FormatType deduces an overall FormatType for the given format. +func (f Format) FormatType() FormatType { + toks := strings.Split(string(f), ";") + if len(toks) < 2 { + return TypeUnknown + } + + params := make(map[string]string) + for i, t := range toks { + if i == 0 { + continue + } + args := strings.Split(t, "=") + if len(args) != 2 { + continue + } + params[strings.TrimSpace(args[0])] = strings.TrimSpace(args[1]) + } + + switch strings.TrimSpace(toks[0]) { + case ProtoType: + if params["proto"] != ProtoProtocol { + return TypeUnknown + } + switch params["encoding"] { + case "delimited": + return TypeProtoDelim + case "text": + return TypeProtoText + case "compact-text": + return TypeProtoCompact + default: + return TypeUnknown + } + case OpenMetricsType: + if params["charset"] != "utf-8" { + return TypeUnknown + } + return TypeOpenMetrics + case "text/plain": + v, ok := params["version"] + if !ok { + return TypeTextPlain + } + if v == TextVersion { + return TypeTextPlain + } + return TypeUnknown + default: + return TypeUnknown + } +} + +// ToEscapingScheme returns an EscapingScheme depending on the Format. Iff the +// Format contains a escaping=allow-utf-8 term, it will select NoEscaping. If a valid +// "escaping" term exists, that will be used. Otherwise, the global default will +// be returned. 
+func (format Format) ToEscapingScheme() model.EscapingScheme { + for _, p := range strings.Split(string(format), ";") { + toks := strings.Split(p, "=") + if len(toks) != 2 { + continue + } + key, value := strings.TrimSpace(toks[0]), strings.TrimSpace(toks[1]) + if key == model.EscapingKey { + scheme, err := model.ToEscapingScheme(value) + if err != nil { + return model.NameEscapingScheme + } + return scheme + } + } + return model.NameEscapingScheme +} diff --git a/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go b/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go index 21cdddcf0..5622578ed 100644 --- a/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go +++ b/vendor/github.com/prometheus/common/expfmt/openmetrics_create.go @@ -35,6 +35,18 @@ import ( // sanity checks. If the input contains duplicate metrics or invalid metric or // label names, the conversion will result in invalid text format output. // +// If metric names conform to the legacy validation pattern, they will be placed +// outside the brackets in the traditional way, like `foo{}`. If the metric name +// fails the legacy validation check, it will be placed quoted inside the +// brackets: `{"foo"}`. As stated above, the input is assumed to be santized and +// no error will be thrown in this case. +// +// Similar to metric names, if label names conform to the legacy validation +// pattern, they will be unquoted as normal, like `foo{bar="baz"}`. If the label +// name fails the legacy validation check, it will be quoted: +// `foo{"bar"="baz"}`. As stated above, the input is assumed to be santized and +// no error will be thrown in this case. +// // This function fulfills the type 'expfmt.encoder'. // // Note that OpenMetrics requires a final `# EOF` line. Since this function acts @@ -98,7 +110,7 @@ func MetricFamilyToOpenMetrics(out io.Writer, in *dto.MetricFamily) (written int if err != nil { return } - n, err = w.WriteString(shortName) + n, err = writeName(w, shortName) written += n if err != nil { return @@ -124,7 +136,7 @@ func MetricFamilyToOpenMetrics(out io.Writer, in *dto.MetricFamily) (written int if err != nil { return } - n, err = w.WriteString(shortName) + n, err = writeName(w, shortName) written += n if err != nil { return @@ -303,21 +315,9 @@ func writeOpenMetricsSample( floatValue float64, intValue uint64, useIntValue bool, exemplar *dto.Exemplar, ) (int, error) { - var written int - n, err := w.WriteString(name) - written += n - if err != nil { - return written, err - } - if suffix != "" { - n, err = w.WriteString(suffix) - written += n - if err != nil { - return written, err - } - } - n, err = writeOpenMetricsLabelPairs( - w, metric.Label, additionalLabelName, additionalLabelValue, + written := 0 + n, err := writeOpenMetricsNameAndLabelPairs( + w, name+suffix, metric.Label, additionalLabelName, additionalLabelValue, ) written += n if err != nil { @@ -365,27 +365,58 @@ func writeOpenMetricsSample( return written, nil } -// writeOpenMetricsLabelPairs works like writeOpenMetrics but formats the float -// in OpenMetrics style. -func writeOpenMetricsLabelPairs( +// writeOpenMetricsNameAndLabelPairs works like writeOpenMetricsSample but +// formats the float in OpenMetrics style. 
+func writeOpenMetricsNameAndLabelPairs( w enhancedWriter, + name string, in []*dto.LabelPair, additionalLabelName string, additionalLabelValue float64, ) (int, error) { - if len(in) == 0 && additionalLabelName == "" { - return 0, nil - } var ( - written int - separator byte = '{' + written int + separator byte = '{' + metricInsideBraces = false ) + + if name != "" { + // If the name does not pass the legacy validity check, we must put the + // metric name inside the braces, quoted. + if !model.IsValidLegacyMetricName(model.LabelValue(name)) { + metricInsideBraces = true + err := w.WriteByte(separator) + written++ + if err != nil { + return written, err + } + separator = ',' + } + + n, err := writeName(w, name) + written += n + if err != nil { + return written, err + } + } + + if len(in) == 0 && additionalLabelName == "" { + if metricInsideBraces { + err := w.WriteByte('}') + written++ + if err != nil { + return written, err + } + } + return written, nil + } + for _, lp := range in { err := w.WriteByte(separator) written++ if err != nil { return written, err } - n, err := w.WriteString(lp.GetName()) + n, err := writeName(w, lp.GetName()) written += n if err != nil { return written, err @@ -451,7 +482,7 @@ func writeExemplar(w enhancedWriter, e *dto.Exemplar) (int, error) { if err != nil { return written, err } - n, err = writeOpenMetricsLabelPairs(w, e.Label, "", 0) + n, err = writeOpenMetricsNameAndLabelPairs(w, "", e.Label, "", 0) written += n if err != nil { return written, err diff --git a/vendor/github.com/prometheus/common/expfmt/text_create.go b/vendor/github.com/prometheus/common/expfmt/text_create.go index 2946b8f1a..f9b8265a9 100644 --- a/vendor/github.com/prometheus/common/expfmt/text_create.go +++ b/vendor/github.com/prometheus/common/expfmt/text_create.go @@ -62,6 +62,18 @@ var ( // contains duplicate metrics or invalid metric or label names, the conversion // will result in invalid text format output. // +// If metric names conform to the legacy validation pattern, they will be placed +// outside the brackets in the traditional way, like `foo{}`. If the metric name +// fails the legacy validation check, it will be placed quoted inside the +// brackets: `{"foo"}`. As stated above, the input is assumed to be santized and +// no error will be thrown in this case. +// +// Similar to metric names, if label names conform to the legacy validation +// pattern, they will be unquoted as normal, like `foo{bar="baz"}`. If the label +// name fails the legacy validation check, it will be quoted: +// `foo{"bar"="baz"}`. As stated above, the input is assumed to be santized and +// no error will be thrown in this case. +// // This method fulfills the type 'prometheus.encoder'. func MetricFamilyToText(out io.Writer, in *dto.MetricFamily) (written int, err error) { // Fail-fast checks. 
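For reference, the quoted-name exposition described in the doc comments above can be exercised end to end. Below is a minimal sketch (the metric and label names are illustrative, not taken from this change); with a name that fails the legacy validity check, the text encoder should emit a sample line along the lines of `{"my.metric","my.label"="x"} 1`:

package main

import (
	"os"

	dto "github.com/prometheus/client_model/go"
	"github.com/prometheus/common/expfmt"
	"google.golang.org/protobuf/proto"
)

func main() {
	// A metric family whose metric name and label name are not legacy-valid.
	mf := &dto.MetricFamily{
		Name: proto.String("my.metric"),
		Type: dto.MetricType_UNTYPED.Enum(),
		Metric: []*dto.Metric{{
			Label: []*dto.LabelPair{{
				Name:  proto.String("my.label"),
				Value: proto.String("x"),
			}},
			Untyped: &dto.Untyped{Value: proto.Float64(1)},
		}},
	}
	// Both the metric name and the label name end up quoted inside the braces.
	if _, err := expfmt.MetricFamilyToText(os.Stdout, mf); err != nil {
		panic(err)
	}
}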
@@ -98,7 +110,7 @@ func MetricFamilyToText(out io.Writer, in *dto.MetricFamily) (written int, err e if err != nil { return } - n, err = w.WriteString(name) + n, err = writeName(w, name) written += n if err != nil { return @@ -124,7 +136,7 @@ func MetricFamilyToText(out io.Writer, in *dto.MetricFamily) (written int, err e if err != nil { return } - n, err = w.WriteString(name) + n, err = writeName(w, name) written += n if err != nil { return @@ -280,21 +292,9 @@ func writeSample( additionalLabelName string, additionalLabelValue float64, value float64, ) (int, error) { - var written int - n, err := w.WriteString(name) - written += n - if err != nil { - return written, err - } - if suffix != "" { - n, err = w.WriteString(suffix) - written += n - if err != nil { - return written, err - } - } - n, err = writeLabelPairs( - w, metric.Label, additionalLabelName, additionalLabelValue, + written := 0 + n, err := writeNameAndLabelPairs( + w, name+suffix, metric.Label, additionalLabelName, additionalLabelValue, ) written += n if err != nil { @@ -330,32 +330,64 @@ func writeSample( return written, nil } -// writeLabelPairs converts a slice of LabelPair proto messages plus the -// explicitly given additional label pair into text formatted as required by the -// text format and writes it to 'w'. An empty slice in combination with an empty -// string 'additionalLabelName' results in nothing being written. Otherwise, the -// label pairs are written, escaped as required by the text format, and enclosed -// in '{...}'. The function returns the number of bytes written and any error -// encountered. -func writeLabelPairs( +// writeNameAndLabelPairs converts a slice of LabelPair proto messages plus the +// explicitly given metric name and additional label pair into text formatted as +// required by the text format and writes it to 'w'. An empty slice in +// combination with an empty string 'additionalLabelName' results in nothing +// being written. Otherwise, the label pairs are written, escaped as required by +// the text format, and enclosed in '{...}'. The function returns the number of +// bytes written and any error encountered. If the metric name is not +// legacy-valid, it will be put inside the brackets as well. Legacy-invalid +// label names will also be quoted. +func writeNameAndLabelPairs( w enhancedWriter, + name string, in []*dto.LabelPair, additionalLabelName string, additionalLabelValue float64, ) (int, error) { - if len(in) == 0 && additionalLabelName == "" { - return 0, nil - } var ( - written int - separator byte = '{' + written int + separator byte = '{' + metricInsideBraces = false ) + + if name != "" { + // If the name does not pass the legacy validity check, we must put the + // metric name inside the braces. 
+ if !model.IsValidLegacyMetricName(model.LabelValue(name)) { + metricInsideBraces = true + err := w.WriteByte(separator) + written++ + if err != nil { + return written, err + } + separator = ',' + } + n, err := writeName(w, name) + written += n + if err != nil { + return written, err + } + } + + if len(in) == 0 && additionalLabelName == "" { + if metricInsideBraces { + err := w.WriteByte('}') + written++ + if err != nil { + return written, err + } + } + return written, nil + } + for _, lp := range in { err := w.WriteByte(separator) written++ if err != nil { return written, err } - n, err := w.WriteString(lp.GetName()) + n, err := writeName(w, lp.GetName()) written += n if err != nil { return written, err @@ -462,3 +494,27 @@ func writeInt(w enhancedWriter, i int64) (int, error) { numBufPool.Put(bp) return written, err } + +// writeName writes a string as-is if it complies with the legacy naming +// scheme, or escapes it in double quotes if not. +func writeName(w enhancedWriter, name string) (int, error) { + if model.IsValidLegacyMetricName(model.LabelValue(name)) { + return w.WriteString(name) + } + var written int + var err error + err = w.WriteByte('"') + written++ + if err != nil { + return written, err + } + var n int + n, err = writeEscapedString(w, name, true) + written += n + if err != nil { + return written, err + } + err = w.WriteByte('"') + written++ + return written, err +} diff --git a/vendor/github.com/prometheus/common/expfmt/text_parse.go b/vendor/github.com/prometheus/common/expfmt/text_parse.go index 35db1cc9d..26490211a 100644 --- a/vendor/github.com/prometheus/common/expfmt/text_parse.go +++ b/vendor/github.com/prometheus/common/expfmt/text_parse.go @@ -16,6 +16,7 @@ package expfmt import ( "bufio" "bytes" + "errors" "fmt" "io" "math" @@ -24,8 +25,9 @@ import ( dto "github.com/prometheus/client_model/go" - "github.com/prometheus/common/model" "google.golang.org/protobuf/proto" + + "github.com/prometheus/common/model" ) // A stateFn is a function that represents a state in a state machine. By @@ -112,7 +114,7 @@ func (p *TextParser) TextToMetricFamilies(in io.Reader) (map[string]*dto.MetricF // stream. Turn this error into something nicer and more // meaningful. (io.EOF is often used as a signal for the legitimate end // of an input stream.) - if p.err == io.EOF { + if p.err != nil && errors.Is(p.err, io.EOF) { p.parseError("unexpected end of input stream") } return p.metricFamiliesByName, p.err @@ -146,7 +148,7 @@ func (p *TextParser) startOfLine() stateFn { // which is not an error but the signal that we are done. // Any other error that happens to align with the start of // a line is still an error. 
- if p.err == io.EOF { + if errors.Is(p.err, io.EOF) { p.err = nil } return nil diff --git a/vendor/github.com/prometheus/common/model/alert.go b/vendor/github.com/prometheus/common/model/alert.go index 35e739c7a..178fdbaf6 100644 --- a/vendor/github.com/prometheus/common/model/alert.go +++ b/vendor/github.com/prometheus/common/model/alert.go @@ -90,13 +90,13 @@ func (a *Alert) Validate() error { return fmt.Errorf("start time must be before end time") } if err := a.Labels.Validate(); err != nil { - return fmt.Errorf("invalid label set: %s", err) + return fmt.Errorf("invalid label set: %w", err) } if len(a.Labels) == 0 { return fmt.Errorf("at least one label pair required") } if err := a.Annotations.Validate(); err != nil { - return fmt.Errorf("invalid annotations: %s", err) + return fmt.Errorf("invalid annotations: %w", err) } return nil } diff --git a/vendor/github.com/prometheus/common/model/labels.go b/vendor/github.com/prometheus/common/model/labels.go index ef8956335..3317ce22f 100644 --- a/vendor/github.com/prometheus/common/model/labels.go +++ b/vendor/github.com/prometheus/common/model/labels.go @@ -97,17 +97,25 @@ var LabelNameRE = regexp.MustCompile("^[a-zA-Z_][a-zA-Z0-9_]*$") // therewith. type LabelName string -// IsValid is true iff the label name matches the pattern of LabelNameRE. This -// method, however, does not use LabelNameRE for the check but a much faster -// hardcoded implementation. +// IsValid returns true iff name matches the pattern of LabelNameRE for legacy +// names, and iff it's valid UTF-8 if NameValidationScheme is set to +// UTF8Validation. For the legacy matching, it does not use LabelNameRE for the +// check but a much faster hardcoded implementation. func (ln LabelName) IsValid() bool { if len(ln) == 0 { return false } - for i, b := range ln { - if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || (b >= '0' && b <= '9' && i > 0)) { - return false + switch NameValidationScheme { + case LegacyValidation: + for i, b := range ln { + if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || (b >= '0' && b <= '9' && i > 0)) { + return false + } } + case UTF8Validation: + return utf8.ValidString(string(ln)) + default: + panic(fmt.Sprintf("Invalid name validation scheme requested: %d", NameValidationScheme)) } return true } @@ -164,7 +172,7 @@ func (l LabelNames) String() string { // A LabelValue is an associated value for a LabelName. type LabelValue string -// IsValid returns true iff the string is a valid UTF8. +// IsValid returns true iff the string is a valid UTF-8. func (lv LabelValue) IsValid() bool { return utf8.ValidString(string(lv)) } diff --git a/vendor/github.com/prometheus/common/model/metadata.go b/vendor/github.com/prometheus/common/model/metadata.go new file mode 100644 index 000000000..447ab8ad6 --- /dev/null +++ b/vendor/github.com/prometheus/common/model/metadata.go @@ -0,0 +1,28 @@ +// Copyright 2023 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +// MetricType represents metric type values. +type MetricType string + +const ( + MetricTypeCounter = MetricType("counter") + MetricTypeGauge = MetricType("gauge") + MetricTypeHistogram = MetricType("histogram") + MetricTypeGaugeHistogram = MetricType("gaugehistogram") + MetricTypeSummary = MetricType("summary") + MetricTypeInfo = MetricType("info") + MetricTypeStateset = MetricType("stateset") + MetricTypeUnknown = MetricType("unknown") +) diff --git a/vendor/github.com/prometheus/common/model/metric.go b/vendor/github.com/prometheus/common/model/metric.go index 00804b7fe..0bd29b3a3 100644 --- a/vendor/github.com/prometheus/common/model/metric.go +++ b/vendor/github.com/prometheus/common/model/metric.go @@ -18,15 +18,84 @@ import ( "regexp" "sort" "strings" + "unicode/utf8" + + dto "github.com/prometheus/client_model/go" + "google.golang.org/protobuf/proto" ) var ( - // MetricNameRE is a regular expression matching valid metric - // names. Note that the IsValidMetricName function performs the same - // check but faster than a match with this regular expression. - MetricNameRE = regexp.MustCompile(`^[a-zA-Z_:][a-zA-Z0-9_:]*$`) + // NameValidationScheme determines the method of name validation to be used by + // all calls to IsValidMetricName() and LabelName IsValid(). Setting UTF-8 mode + // in isolation from other components that don't support UTF-8 may result in + // bugs or other undefined behavior. This value is intended to be set by + // UTF-8-aware binaries as part of their startup. To avoid need for locking, + // this value should be set once, ideally in an init(), before multiple + // goroutines are started. + NameValidationScheme = LegacyValidation + + // NameEscapingScheme defines the default way that names will be + // escaped when presented to systems that do not support UTF-8 names. If the + // Content-Type "escaping" term is specified, that will override this value. + NameEscapingScheme = ValueEncodingEscaping +) + +// ValidationScheme is a Go enum for determining how metric and label names will +// be validated by this library. +type ValidationScheme int + +const ( + // LegacyValidation is a setting that requirets that metric and label names + // conform to the original Prometheus character requirements described by + // MetricNameRE and LabelNameRE. + LegacyValidation ValidationScheme = iota + + // UTF8Validation only requires that metric and label names be valid UTF-8 + // strings. + UTF8Validation +) + +type EscapingScheme int + +const ( + // NoEscaping indicates that a name will not be escaped. Unescaped names that + // do not conform to the legacy validity check will use a new exposition + // format syntax that will be officially standardized in future versions. + NoEscaping EscapingScheme = iota + + // UnderscoreEscaping replaces all legacy-invalid characters with underscores. + UnderscoreEscaping + + // DotsEscaping is similar to UnderscoreEscaping, except that dots are + // converted to `_dot_` and pre-existing underscores are converted to `__`. + DotsEscaping + + // ValueEncodingEscaping prepends the name with `U__` and replaces all invalid + // characters with the unicode value, surrounded by underscores. Single + // underscores are replaced with double underscores. + ValueEncodingEscaping +) + +const ( + // EscapingKey is the key in an Accept or Content-Type header that defines how + // metric and label names that do not conform to the legacy character + // requirements should be escaped when being scraped by a legacy prometheus + // system. 
If a system does not explicitly pass an escaping parameter in the + // Accept header, the default NameEscapingScheme will be used. + EscapingKey = "escaping" + + // Possible values for Escaping Key: + AllowUTF8 = "allow-utf-8" // No escaping required. + EscapeUnderscores = "underscores" + EscapeDots = "dots" + EscapeValues = "values" ) +// MetricNameRE is a regular expression matching valid metric +// names. Note that the IsValidMetricName function performs the same +// check but faster than a match with this regular expression. +var MetricNameRE = regexp.MustCompile(`^[a-zA-Z_:][a-zA-Z0-9_:]*$`) + // A Metric is similar to a LabelSet, but the key difference is that a Metric is // a singleton and refers to one and only one stream of samples. type Metric LabelSet @@ -86,17 +155,302 @@ func (m Metric) FastFingerprint() Fingerprint { return LabelSet(m).FastFingerprint() } -// IsValidMetricName returns true iff name matches the pattern of MetricNameRE. +// IsValidMetricName returns true iff name matches the pattern of MetricNameRE +// for legacy names, and iff it's valid UTF-8 if the UTF8Validation scheme is +// selected. +func IsValidMetricName(n LabelValue) bool { + switch NameValidationScheme { + case LegacyValidation: + return IsValidLegacyMetricName(n) + case UTF8Validation: + if len(n) == 0 { + return false + } + return utf8.ValidString(string(n)) + default: + panic(fmt.Sprintf("Invalid name validation scheme requested: %d", NameValidationScheme)) + } +} + +// IsValidLegacyMetricName is similar to IsValidMetricName but always uses the +// legacy validation scheme regardless of the value of NameValidationScheme. // This function, however, does not use MetricNameRE for the check but a much // faster hardcoded implementation. -func IsValidMetricName(n LabelValue) bool { +func IsValidLegacyMetricName(n LabelValue) bool { if len(n) == 0 { return false } for i, b := range n { - if !((b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || b == ':' || (b >= '0' && b <= '9' && i > 0)) { + if !isValidLegacyRune(b, i) { return false } } return true } + +// EscapeMetricFamily escapes the given metric names and labels with the given +// escaping scheme. Returns a new object that uses the same pointers to fields +// when possible and creates new escaped versions so as not to mutate the +// input. +func EscapeMetricFamily(v *dto.MetricFamily, scheme EscapingScheme) *dto.MetricFamily { + if v == nil { + return nil + } + + if scheme == NoEscaping { + return v + } + + out := &dto.MetricFamily{ + Help: v.Help, + Type: v.Type, + } + + // If the name is nil, copy as-is, don't try to escape. 
+ if v.Name == nil || IsValidLegacyMetricName(LabelValue(v.GetName())) { + out.Name = v.Name + } else { + out.Name = proto.String(EscapeName(v.GetName(), scheme)) + } + for _, m := range v.Metric { + if !metricNeedsEscaping(m) { + out.Metric = append(out.Metric, m) + continue + } + + escaped := &dto.Metric{ + Gauge: m.Gauge, + Counter: m.Counter, + Summary: m.Summary, + Untyped: m.Untyped, + Histogram: m.Histogram, + TimestampMs: m.TimestampMs, + } + + for _, l := range m.Label { + if l.GetName() == MetricNameLabel { + if l.Value == nil || IsValidLegacyMetricName(LabelValue(l.GetValue())) { + escaped.Label = append(escaped.Label, l) + continue + } + escaped.Label = append(escaped.Label, &dto.LabelPair{ + Name: proto.String(MetricNameLabel), + Value: proto.String(EscapeName(l.GetValue(), scheme)), + }) + continue + } + if l.Name == nil || IsValidLegacyMetricName(LabelValue(l.GetName())) { + escaped.Label = append(escaped.Label, l) + continue + } + escaped.Label = append(escaped.Label, &dto.LabelPair{ + Name: proto.String(EscapeName(l.GetName(), scheme)), + Value: l.Value, + }) + } + out.Metric = append(out.Metric, escaped) + } + return out +} + +func metricNeedsEscaping(m *dto.Metric) bool { + for _, l := range m.Label { + if l.GetName() == MetricNameLabel && !IsValidLegacyMetricName(LabelValue(l.GetValue())) { + return true + } + if !IsValidLegacyMetricName(LabelValue(l.GetName())) { + return true + } + } + return false +} + +const ( + lowerhex = "0123456789abcdef" +) + +// EscapeName escapes the incoming name according to the provided escaping +// scheme. Depending on the rules of escaping, this may cause no change in the +// string that is returned. (Especially NoEscaping, which by definition is a +// noop). This function does not do any validation of the name. +func EscapeName(name string, scheme EscapingScheme) string { + if len(name) == 0 { + return name + } + var escaped strings.Builder + switch scheme { + case NoEscaping: + return name + case UnderscoreEscaping: + if IsValidLegacyMetricName(LabelValue(name)) { + return name + } + for i, b := range name { + if isValidLegacyRune(b, i) { + escaped.WriteRune(b) + } else { + escaped.WriteRune('_') + } + } + return escaped.String() + case DotsEscaping: + // Do not early return for legacy valid names, we still escape underscores. + for i, b := range name { + if b == '_' { + escaped.WriteString("__") + } else if b == '.' { + escaped.WriteString("_dot_") + } else if isValidLegacyRune(b, i) { + escaped.WriteRune(b) + } else { + escaped.WriteRune('_') + } + } + return escaped.String() + case ValueEncodingEscaping: + if IsValidLegacyMetricName(LabelValue(name)) { + return name + } + escaped.WriteString("U__") + for i, b := range name { + if isValidLegacyRune(b, i) { + escaped.WriteRune(b) + } else if !utf8.ValidRune(b) { + escaped.WriteString("_FFFD_") + } else if b < 0x100 { + escaped.WriteRune('_') + for s := 4; s >= 0; s -= 4 { + escaped.WriteByte(lowerhex[b>>uint(s)&0xF]) + } + escaped.WriteRune('_') + } else if b < 0x10000 { + escaped.WriteRune('_') + for s := 12; s >= 0; s -= 4 { + escaped.WriteByte(lowerhex[b>>uint(s)&0xF]) + } + escaped.WriteRune('_') + } + } + return escaped.String() + default: + panic(fmt.Sprintf("invalid escaping scheme %d", scheme)) + } +} + +// lower function taken from strconv.atoi +func lower(c byte) byte { + return c | ('x' - 'X') +} + +// UnescapeName unescapes the incoming name according to the provided escaping +// scheme if possible. Some schemes are partially or totally non-roundtripable. 
+// If any error is enountered, returns the original input. +func UnescapeName(name string, scheme EscapingScheme) string { + if len(name) == 0 { + return name + } + switch scheme { + case NoEscaping: + return name + case UnderscoreEscaping: + // It is not possible to unescape from underscore replacement. + return name + case DotsEscaping: + name = strings.ReplaceAll(name, "_dot_", ".") + name = strings.ReplaceAll(name, "__", "_") + return name + case ValueEncodingEscaping: + escapedName, found := strings.CutPrefix(name, "U__") + if !found { + return name + } + + var unescaped strings.Builder + TOP: + for i := 0; i < len(escapedName); i++ { + // All non-underscores are treated normally. + if escapedName[i] != '_' { + unescaped.WriteByte(escapedName[i]) + continue + } + i++ + if i >= len(escapedName) { + return name + } + // A double underscore is a single underscore. + if escapedName[i] == '_' { + unescaped.WriteByte('_') + continue + } + // We think we are in a UTF-8 code, process it. + var utf8Val uint + for j := 0; i < len(escapedName); j++ { + // This is too many characters for a utf8 value. + if j > 4 { + return name + } + // Found a closing underscore, convert to a rune, check validity, and append. + if escapedName[i] == '_' { + utf8Rune := rune(utf8Val) + if !utf8.ValidRune(utf8Rune) { + return name + } + unescaped.WriteRune(utf8Rune) + continue TOP + } + r := lower(escapedName[i]) + utf8Val *= 16 + if r >= '0' && r <= '9' { + utf8Val += uint(r) - '0' + } else if r >= 'a' && r <= 'f' { + utf8Val += uint(r) - 'a' + 10 + } else { + return name + } + i++ + } + // Didn't find closing underscore, invalid. + return name + } + return unescaped.String() + default: + panic(fmt.Sprintf("invalid escaping scheme %d", scheme)) + } +} + +func isValidLegacyRune(b rune, i int) bool { + return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' || b == ':' || (b >= '0' && b <= '9' && i > 0) +} + +func (e EscapingScheme) String() string { + switch e { + case NoEscaping: + return AllowUTF8 + case UnderscoreEscaping: + return EscapeUnderscores + case DotsEscaping: + return EscapeDots + case ValueEncodingEscaping: + return EscapeValues + default: + panic(fmt.Sprintf("unknown format scheme %d", e)) + } +} + +func ToEscapingScheme(s string) (EscapingScheme, error) { + if s == "" { + return NoEscaping, fmt.Errorf("got empty string instead of escaping scheme") + } + switch s { + case AllowUTF8: + return NoEscaping, nil + case EscapeUnderscores: + return UnderscoreEscaping, nil + case EscapeDots: + return DotsEscaping, nil + case EscapeValues: + return ValueEncodingEscaping, nil + default: + return NoEscaping, fmt.Errorf("unknown format scheme " + s) + } +} diff --git a/vendor/github.com/prometheus/common/model/signature.go b/vendor/github.com/prometheus/common/model/signature.go index 8762b13c6..dc8a0026c 100644 --- a/vendor/github.com/prometheus/common/model/signature.go +++ b/vendor/github.com/prometheus/common/model/signature.go @@ -22,10 +22,8 @@ import ( // when calculating their combined hash value (aka signature aka fingerprint). const SeparatorByte byte = 255 -var ( - // cache the signature of an empty label set. - emptyLabelSignature = hashNew() -) +// cache the signature of an empty label set. +var emptyLabelSignature = hashNew() // LabelsToSignature returns a quasi-unique signature (i.e., fingerprint) for a // given label set. 
(Collisions are possible but unlikely if the number of label diff --git a/vendor/github.com/prometheus/common/model/silence.go b/vendor/github.com/prometheus/common/model/silence.go index bb99889d2..910b0b71f 100644 --- a/vendor/github.com/prometheus/common/model/silence.go +++ b/vendor/github.com/prometheus/common/model/silence.go @@ -81,7 +81,7 @@ func (s *Silence) Validate() error { } for _, m := range s.Matchers { if err := m.Validate(); err != nil { - return fmt.Errorf("invalid matcher: %s", err) + return fmt.Errorf("invalid matcher: %w", err) } } if s.StartsAt.IsZero() { diff --git a/vendor/github.com/prometheus/common/model/value.go b/vendor/github.com/prometheus/common/model/value.go index 9eb440413..8050637d8 100644 --- a/vendor/github.com/prometheus/common/model/value.go +++ b/vendor/github.com/prometheus/common/model/value.go @@ -21,14 +21,12 @@ import ( "strings" ) -var ( - // ZeroSample is the pseudo zero-value of Sample used to signal a - // non-existing sample. It is a Sample with timestamp Earliest, value 0.0, - // and metric nil. Note that the natural zero value of Sample has a timestamp - // of 0, which is possible to appear in a real Sample and thus not suitable - // to signal a non-existing Sample. - ZeroSample = Sample{Timestamp: Earliest} -) +// ZeroSample is the pseudo zero-value of Sample used to signal a +// non-existing sample. It is a Sample with timestamp Earliest, value 0.0, +// and metric nil. Note that the natural zero value of Sample has a timestamp +// of 0, which is possible to appear in a real Sample and thus not suitable +// to signal a non-existing Sample. +var ZeroSample = Sample{Timestamp: Earliest} // Sample is a sample pair associated with a metric. A single sample must either // define Value or Histogram but not both. Histogram == nil implies the Value @@ -274,7 +272,7 @@ func (s *Scalar) UnmarshalJSON(b []byte) error { value, err := strconv.ParseFloat(f, 64) if err != nil { - return fmt.Errorf("error parsing sample value: %s", err) + return fmt.Errorf("error parsing sample value: %w", err) } s.Value = SampleValue(value) return nil diff --git a/vendor/github.com/prometheus/common/model/value_float.go b/vendor/github.com/prometheus/common/model/value_float.go index 0f615a705..ae35cc2ab 100644 --- a/vendor/github.com/prometheus/common/model/value_float.go +++ b/vendor/github.com/prometheus/common/model/value_float.go @@ -20,14 +20,12 @@ import ( "strconv" ) -var ( - // ZeroSamplePair is the pseudo zero-value of SamplePair used to signal a - // non-existing sample pair. It is a SamplePair with timestamp Earliest and - // value 0.0. Note that the natural zero value of SamplePair has a timestamp - // of 0, which is possible to appear in a real SamplePair and thus not - // suitable to signal a non-existing SamplePair. - ZeroSamplePair = SamplePair{Timestamp: Earliest} -) +// ZeroSamplePair is the pseudo zero-value of SamplePair used to signal a +// non-existing sample pair. It is a SamplePair with timestamp Earliest and +// value 0.0. Note that the natural zero value of SamplePair has a timestamp +// of 0, which is possible to appear in a real SamplePair and thus not +// suitable to signal a non-existing SamplePair. +var ZeroSamplePair = SamplePair{Timestamp: Earliest} // A SampleValue is a representation of a value for a given sample at a given // time. 
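Taken together, the escaping helpers added to prometheus/common/model and the escaping term recognized on an expfmt.Format can be used as sketched below (the metric name and Content-Type string are illustrative; the commented outputs follow from the escaping rules defined above):

package main

import (
	"fmt"

	"github.com/prometheus/common/expfmt"
	"github.com/prometheus/common/model"
)

func main() {
	// Escape a UTF-8 metric name for consumers that only accept legacy names.
	name := "http.requests.total"
	fmt.Println(model.EscapeName(name, model.UnderscoreEscaping))    // http_requests_total
	fmt.Println(model.EscapeName(name, model.DotsEscaping))          // http_dot_requests_dot_total
	fmt.Println(model.EscapeName(name, model.ValueEncodingEscaping)) // U__http_2e_requests_2e_total

	// A negotiated Content-Type may carry an explicit escaping term;
	// ToEscapingScheme falls back to model.NameEscapingScheme otherwise.
	f := expfmt.Format("text/plain; version=0.0.4; charset=utf-8; escaping=allow-utf-8")
	fmt.Println(f.ToEscapingScheme()) // allow-utf-8 (NoEscaping)
}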
diff --git a/vendor/github.com/tdewolff/minify/v2/html/html.go b/vendor/github.com/tdewolff/minify/v2/html/html.go index ab50ff650..aa078ef12 100644 --- a/vendor/github.com/tdewolff/minify/v2/html/html.go +++ b/vendor/github.com/tdewolff/minify/v2/html/html.go @@ -515,7 +515,7 @@ func (o *Minifier) Minify(m *minify.M, w io.Writer, r io.Reader, _ map[string]st // skip text in select and optgroup tags if t.Hash == Select || t.Hash == Optgroup { - if next := tb.Peek(0); next.TokenType == html.TextToken { + if next := tb.Peek(0); next.TokenType == html.TextToken && !next.HasTemplate { tb.Shift() } } diff --git a/vendor/github.com/tdewolff/parse/v2/binary.go b/vendor/github.com/tdewolff/parse/v2/binary.go new file mode 100644 index 000000000..83c08d742 --- /dev/null +++ b/vendor/github.com/tdewolff/parse/v2/binary.go @@ -0,0 +1,483 @@ +package parse + +import ( + "encoding/binary" + "fmt" + "io" + "math" + "os" +) + +// BinaryReader is a binary big endian file format reader. +type BinaryReader struct { + Endianness binary.ByteOrder + buf []byte + pos uint32 + eof bool +} + +// NewBinaryReader returns a big endian binary file format reader. +func NewBinaryReader(buf []byte) *BinaryReader { + if math.MaxUint32 < uint(len(buf)) { + return &BinaryReader{binary.BigEndian, nil, 0, true} + } + return &BinaryReader{binary.BigEndian, buf, 0, false} +} + +// NewBinaryReaderLE returns a little endian binary file format reader. +func NewBinaryReaderLE(buf []byte) *BinaryReader { + r := NewBinaryReader(buf) + r.Endianness = binary.LittleEndian + return r +} + +// Seek set the reader position in the buffer. +func (r *BinaryReader) Seek(pos uint32) error { + if uint32(len(r.buf)) < pos { + r.eof = true + return io.EOF + } + r.pos = pos + r.eof = false + return nil +} + +// Pos returns the reader's position. +func (r *BinaryReader) Pos() uint32 { + return r.pos +} + +// Len returns the remaining length of the buffer. +func (r *BinaryReader) Len() uint32 { + return uint32(len(r.buf)) - r.pos +} + +// EOF returns true if we reached the end-of-file. +func (r *BinaryReader) EOF() bool { + return r.eof +} + +// Read complies with io.Reader. +func (r *BinaryReader) Read(b []byte) (int, error) { + n := copy(b, r.buf[r.pos:]) + r.pos += uint32(n) + if r.pos == uint32(len(r.buf)) { + r.eof = true + return n, io.EOF + } + return n, nil +} + +// ReadBytes reads n bytes. +func (r *BinaryReader) ReadBytes(n uint32) []byte { + if r.eof || uint32(len(r.buf))-r.pos < n { + r.eof = true + return nil + } + buf := r.buf[r.pos : r.pos+n : r.pos+n] + r.pos += n + return buf +} + +// ReadString reads a string of length n. +func (r *BinaryReader) ReadString(n uint32) string { + return string(r.ReadBytes(n)) +} + +// ReadByte reads a single byte. +func (r *BinaryReader) ReadByte() byte { + b := r.ReadBytes(1) + if b == nil { + return 0 + } + return b[0] +} + +// ReadUint8 reads a uint8. +func (r *BinaryReader) ReadUint8() uint8 { + return r.ReadByte() +} + +// ReadUint16 reads a uint16. +func (r *BinaryReader) ReadUint16() uint16 { + b := r.ReadBytes(2) + if b == nil { + return 0 + } + return r.Endianness.Uint16(b) +} + +// ReadUint32 reads a uint32. +func (r *BinaryReader) ReadUint32() uint32 { + b := r.ReadBytes(4) + if b == nil { + return 0 + } + return r.Endianness.Uint32(b) +} + +// ReadUint64 reads a uint64. +func (r *BinaryReader) ReadUint64() uint64 { + b := r.ReadBytes(8) + if b == nil { + return 0 + } + return r.Endianness.Uint64(b) +} + +// ReadInt8 reads a int8. 
+func (r *BinaryReader) ReadInt8() int8 { + return int8(r.ReadByte()) +} + +// ReadInt16 reads a int16. +func (r *BinaryReader) ReadInt16() int16 { + return int16(r.ReadUint16()) +} + +// ReadInt32 reads a int32. +func (r *BinaryReader) ReadInt32() int32 { + return int32(r.ReadUint32()) +} + +// ReadInt64 reads a int64. +func (r *BinaryReader) ReadInt64() int64 { + return int64(r.ReadUint64()) +} + +type BinaryFileReader struct { + f *os.File + size uint64 + offset uint64 + + Endianness binary.ByteOrder + buf []byte + pos int +} + +func NewBinaryFileReader(f *os.File, chunk int) (*BinaryFileReader, error) { + var buf []byte + var size uint64 + if chunk == 0 { + var err error + if buf, err = io.ReadAll(f); err != nil { + return nil, err + } + } else { + buf = make([]byte, 0, chunk) + } + if info, err := f.Stat(); err != nil { + return nil, err + } else { + size = uint64(info.Size()) + } + return &BinaryFileReader{ + f: f, + size: size, + Endianness: binary.BigEndian, + buf: buf, + }, nil +} + +func (r *BinaryFileReader) buffer(pos, length uint64) error { + if pos < r.offset || r.offset+uint64(len(r.buf)) < pos+length { + if math.MaxInt64 < pos { + return fmt.Errorf("seek position too large") + } else if _, err := r.f.Seek(int64(pos), 0); err != nil { + return err + } else if n, err := r.f.Read(r.buf[:cap(r.buf)]); err != nil { + return err + } else { + r.offset = pos + r.buf = r.buf[:n] + r.pos = 0 + } + } + return nil +} + +// Seek set the reader position in the buffer. +func (r *BinaryFileReader) Seek(pos uint64) error { + if r.size <= pos { + return io.EOF + } else if err := r.buffer(pos, 0); err != nil { + return err + } + r.pos = int(pos - r.offset) + return nil +} + +// Pos returns the reader's position. +func (r *BinaryFileReader) Pos() uint64 { + return r.offset + uint64(r.pos) +} + +// Len returns the remaining length of the buffer. +func (r *BinaryFileReader) Len() uint64 { + return r.size - r.Pos() +} + +// Offset returns the offset of the buffer. +func (r *BinaryFileReader) Offset() uint64 { + return r.offset +} + +// BufferLen returns the length of the buffer. +func (r *BinaryFileReader) BufferLen() int { + return len(r.buf) +} + +// Read complies with io.Reader. +func (r *BinaryFileReader) Read(b []byte) (int, error) { + if len(b) <= cap(r.buf) { + if err := r.buffer(r.offset+uint64(r.pos), uint64(len(b))); err != nil { + return 0, err + } + n := copy(b, r.buf[r.pos:]) + r.pos += n + return n, nil + } + + // read directly from file + if _, err := r.f.Seek(int64(r.offset)+int64(r.pos), 0); err != nil { + return 0, err + } + n, err := r.f.Read(b) + r.offset += uint64(r.pos + n) + r.pos = 0 + r.buf = r.buf[:0] + return n, err +} + +// ReadBytes reads n bytes. +func (r *BinaryFileReader) ReadBytes(n int) []byte { + if n < len(r.buf)-r.pos { + b := r.buf[r.pos : r.pos+n] + r.pos += n + return b + } + + b := make([]byte, n) + if _, err := r.Read(b); err != nil { + return nil + } + return b +} + +// ReadString reads a string of length n. +func (r *BinaryFileReader) ReadString(n int) string { + return string(r.ReadBytes(n)) +} + +// ReadByte reads a single byte. +func (r *BinaryFileReader) ReadByte() byte { + b := r.ReadBytes(1) + if b == nil { + return 0 + } + return b[0] +} + +// ReadUint8 reads a uint8. +func (r *BinaryFileReader) ReadUint8() uint8 { + return r.ReadByte() +} + +// ReadUint16 reads a uint16. +func (r *BinaryFileReader) ReadUint16() uint16 { + b := r.ReadBytes(2) + if b == nil { + return 0 + } + return r.Endianness.Uint16(b) +} + +// ReadUint32 reads a uint32. 
+func (r *BinaryFileReader) ReadUint32() uint32 { + b := r.ReadBytes(4) + if b == nil { + return 0 + } + return r.Endianness.Uint32(b) +} + +// ReadUint64 reads a uint64. +func (r *BinaryFileReader) ReadUint64() uint64 { + b := r.ReadBytes(8) + if b == nil { + return 0 + } + return r.Endianness.Uint64(b) +} + +// ReadInt8 reads a int8. +func (r *BinaryFileReader) ReadInt8() int8 { + return int8(r.ReadByte()) +} + +// ReadInt16 reads a int16. +func (r *BinaryFileReader) ReadInt16() int16 { + return int16(r.ReadUint16()) +} + +// ReadInt32 reads a int32. +func (r *BinaryFileReader) ReadInt32() int32 { + return int32(r.ReadUint32()) +} + +// ReadInt64 reads a int64. +func (r *BinaryFileReader) ReadInt64() int64 { + return int64(r.ReadUint64()) +} + +// BinaryWriter is a big endian binary file format writer. +type BinaryWriter struct { + buf []byte +} + +// NewBinaryWriter returns a big endian binary file format writer. +func NewBinaryWriter(buf []byte) *BinaryWriter { + return &BinaryWriter{buf} +} + +// Len returns the buffer's length in bytes. +func (w *BinaryWriter) Len() uint32 { + return uint32(len(w.buf)) +} + +// Bytes returns the buffer's bytes. +func (w *BinaryWriter) Bytes() []byte { + return w.buf +} + +// Write complies with io.Writer. +func (w *BinaryWriter) Write(b []byte) (int, error) { + w.buf = append(w.buf, b...) + return len(b), nil +} + +// WriteBytes writes the given bytes to the buffer. +func (w *BinaryWriter) WriteBytes(v []byte) { + w.buf = append(w.buf, v...) +} + +// WriteString writes the given string to the buffer. +func (w *BinaryWriter) WriteString(v string) { + w.WriteBytes([]byte(v)) +} + +// WriteByte writes the given byte to the buffer. +func (w *BinaryWriter) WriteByte(v byte) { + w.WriteBytes([]byte{v}) +} + +// WriteUint8 writes the given uint8 to the buffer. +func (w *BinaryWriter) WriteUint8(v uint8) { + w.WriteByte(v) +} + +// WriteUint16 writes the given uint16 to the buffer. +func (w *BinaryWriter) WriteUint16(v uint16) { + pos := len(w.buf) + w.buf = append(w.buf, make([]byte, 2)...) + binary.BigEndian.PutUint16(w.buf[pos:], v) +} + +// WriteUint32 writes the given uint32 to the buffer. +func (w *BinaryWriter) WriteUint32(v uint32) { + pos := len(w.buf) + w.buf = append(w.buf, make([]byte, 4)...) + binary.BigEndian.PutUint32(w.buf[pos:], v) +} + +// WriteUint64 writes the given uint64 to the buffer. +func (w *BinaryWriter) WriteUint64(v uint64) { + pos := len(w.buf) + w.buf = append(w.buf, make([]byte, 8)...) + binary.BigEndian.PutUint64(w.buf[pos:], v) +} + +// WriteInt8 writes the given int8 to the buffer. +func (w *BinaryWriter) WriteInt8(v int8) { + w.WriteUint8(uint8(v)) +} + +// WriteInt16 writes the given int16 to the buffer. +func (w *BinaryWriter) WriteInt16(v int16) { + w.WriteUint16(uint16(v)) +} + +// WriteInt32 writes the given int32 to the buffer. +func (w *BinaryWriter) WriteInt32(v int32) { + w.WriteUint32(uint32(v)) +} + +// WriteInt64 writes the given int64 to the buffer. +func (w *BinaryWriter) WriteInt64(v int64) { + w.WriteUint64(uint64(v)) +} + +// BitmapReader is a binary bitmap reader. +type BitmapReader struct { + buf []byte + pos uint32 // TODO: to uint64 + eof bool +} + +// NewBitmapReader returns a binary bitmap reader. +func NewBitmapReader(buf []byte) *BitmapReader { + return &BitmapReader{buf, 0, false} +} + +// Pos returns the current bit position. +func (r *BitmapReader) Pos() uint32 { + return r.pos +} + +// EOF returns if we reached the buffer's end-of-file. 
+func (r *BitmapReader) EOF() bool { + return r.eof +} + +// Read reads the next bit. +func (r *BitmapReader) Read() bool { + if r.eof || uint32(len(r.buf)) <= (r.pos+1)/8 { + r.eof = true + return false + } + bit := r.buf[r.pos>>3]&(0x80>>(r.pos&7)) != 0 + r.pos += 1 + return bit +} + +// BitmapWriter is a binary bitmap writer. +type BitmapWriter struct { + buf []byte + pos uint32 +} + +// NewBitmapWriter returns a binary bitmap writer. +func NewBitmapWriter(buf []byte) *BitmapWriter { + return &BitmapWriter{buf, 0} +} + +// Len returns the buffer's length in bytes. +func (w *BitmapWriter) Len() uint32 { + return uint32(len(w.buf)) +} + +// Bytes returns the buffer's bytes. +func (w *BitmapWriter) Bytes() []byte { + return w.buf +} + +// Write writes the next bit. +func (w *BitmapWriter) Write(bit bool) { + if uint32(len(w.buf)) <= (w.pos+1)/8 { + w.buf = append(w.buf, 0) + } + if bit { + w.buf[w.pos>>3] = w.buf[w.pos>>3] | (0x80 >> (w.pos & 7)) + } + w.pos += 1 +} diff --git a/vendor/github.com/tdewolff/parse/v2/input.go b/vendor/github.com/tdewolff/parse/v2/input.go index 5b6d8f547..924f14f0c 100644 --- a/vendor/github.com/tdewolff/parse/v2/input.go +++ b/vendor/github.com/tdewolff/parse/v2/input.go @@ -92,7 +92,7 @@ func (z *Input) Err() error { func (z *Input) PeekErr(pos int) error { if z.err != nil { return z.err - } else if z.pos+pos >= len(z.buf)-1 { + } else if len(z.buf)-1 <= z.pos+pos { return io.EOF } return nil @@ -109,11 +109,11 @@ func (z *Input) Peek(pos int) byte { func (z *Input) PeekRune(pos int) (rune, int) { // from unicode/utf8 c := z.Peek(pos) - if c < 0xC0 || z.Peek(pos+1) == 0 { + if c < 0xC0 || len(z.buf)-1-z.pos < 2 { return rune(c), 1 - } else if c < 0xE0 || z.Peek(pos+2) == 0 { + } else if c < 0xE0 || len(z.buf)-1-z.pos < 3 { return rune(c&0x1F)<<6 | rune(z.Peek(pos+1)&0x3F), 2 - } else if c < 0xF0 || z.Peek(pos+3) == 0 { + } else if c < 0xF0 || len(z.buf)-1-z.pos < 4 { return rune(c&0x0F)<<12 | rune(z.Peek(pos+1)&0x3F)<<6 | rune(z.Peek(pos+2)&0x3F), 3 } return rune(c&0x07)<<18 | rune(z.Peek(pos+1)&0x3F)<<12 | rune(z.Peek(pos+2)&0x3F)<<6 | rune(z.Peek(pos+3)&0x3F), 4 @@ -124,6 +124,20 @@ func (z *Input) Move(n int) { z.pos += n } +// MoveRune advances the position by the length of the current rune. +func (z *Input) MoveRune() { + c := z.Peek(0) + if c < 0xC0 || len(z.buf)-1-z.pos < 2 { + z.pos++ + } else if c < 0xE0 || len(z.buf)-1-z.pos < 3 { + z.pos += 2 + } else if c < 0xF0 || len(z.buf)-1-z.pos < 4 { + z.pos += 3 + } else { + z.pos += 4 + } +} + // Pos returns a mark to which can be rewinded. func (z *Input) Pos() int { return z.pos - z.start |
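A minimal round-trip sketch of the new parse.BinaryWriter and parse.BinaryReader added above (the values written are arbitrary and for illustration only):

package main

import (
	"fmt"

	"github.com/tdewolff/parse/v2"
)

func main() {
	// Write a few values with the big-endian writer...
	w := parse.NewBinaryWriter(nil)
	w.WriteUint16(0xCAFE)
	w.WriteString("go")
	w.WriteInt32(-7)

	// ...and read them back with the big-endian reader.
	r := parse.NewBinaryReader(w.Bytes())
	fmt.Printf("%#x %s %d\n", r.ReadUint16(), r.ReadString(2), r.ReadInt32())
	fmt.Println(r.Len()) // 0 — nothing left to read
}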