author | 2025-02-05 12:47:13 +0100
committer | 2025-02-05 12:47:13 +0100
commit | baed591a1d19942ec553baed41a8048ab9dd18ca (patch)
tree | b8c91d4f193ab2a80e71f222fb1bda4bb775805b /internal/api
parent | [bugfix] wrong nodeinfo version (tobi is a boob) (#3735) (diff)
download | gotosocial-baed591a1d19942ec553baed41a8048ab9dd18ca.tar.xz
[feature] Use `X-Robots-Tag` headers to instruct scrapers/crawlers (#3737)
* [feature] Use `X-Robots-Tag` headers to instruct scrapers/crawlers
* use switch for RobotsHeaders
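
The `middleware.RobotsHeaders(...)` helper used throughout this change lives in `internal/middleware`, outside the `internal/api` scope of this diff, so its body is not shown here. As a rough sketch only (not the actual implementation), it could be a gin middleware that switches on the mode string seen at the call sites (`"allowSome"` vs `""`) and emits the directive constants added in `internal/api/util/robots.go`:

```go
package middleware

import (
	"github.com/gin-gonic/gin"
	apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
)

// RobotsHeaders returns gin middleware that sets an X-Robots-Tag header
// on each response. Illustrative sketch only; the real implementation is
// not included in this diff (which is limited to internal/api).
func RobotsHeaders(mode string) gin.HandlerFunc {
	return func(c *gin.Context) {
		switch mode {
		case "allowSome":
			// Limited crawling, used for nodeinfo endpoints when the
			// instance is configured to serve accurate stats there.
			c.Header("X-Robots-Tag", apiutil.RobotsDirectivesAllowSome)
		default:
			// Disallow indexing and following entirely.
			c.Header("X-Robots-Tag", apiutil.RobotsDirectivesDisallow)
		}
	}
}
```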
Diffstat (limited to 'internal/api')
-rw-r--r-- | internal/api/nodeinfo.go | 11
-rw-r--r-- | internal/api/robots.go | 52
-rw-r--r-- | internal/api/robots/robots.go | 57
-rw-r--r-- | internal/api/util/robots.go | 133
-rw-r--r-- | internal/api/wellknown/hostmeta/hostmeta.go | 4
-rw-r--r-- | internal/api/wellknown/nodeinfo/nodeinfo.go | 58
-rw-r--r-- | internal/api/wellknown/nodeinfo/nodeinfoget.go | 66
-rw-r--r-- | internal/api/wellknown/webfinger/webfinger.go | 4
8 files changed, 316 insertions, 69 deletions
diff --git a/internal/api/nodeinfo.go b/internal/api/nodeinfo.go
index 29942aba4..2f0c234fd 100644
--- a/internal/api/nodeinfo.go
+++ b/internal/api/nodeinfo.go
@@ -20,6 +20,7 @@ package api
 import (
 	"github.com/gin-gonic/gin"
 	"github.com/superseriousbusiness/gotosocial/internal/api/nodeinfo"
+	"github.com/superseriousbusiness/gotosocial/internal/config"
 	"github.com/superseriousbusiness/gotosocial/internal/middleware"
 	"github.com/superseriousbusiness/gotosocial/internal/processing"
 	"github.com/superseriousbusiness/gotosocial/internal/router"
@@ -43,6 +44,16 @@ func (w *NodeInfo) Route(r *router.Router, m ...gin.HandlerFunc) {
 		}),
 	)
 
+	// If instance is configured to serve instance stats
+	// faithfully at nodeinfo, we should allow robots to
+	// crawl nodeinfo endpoints in a limited capacity.
+	// In all other cases, disallow everything.
+	if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
+		nodeInfoGroup.Use(middleware.RobotsHeaders("allowSome"))
+	} else {
+		nodeInfoGroup.Use(middleware.RobotsHeaders(""))
+	}
+
 	w.nodeInfo.Route(nodeInfoGroup.Handle)
 }
diff --git a/internal/api/robots.go b/internal/api/robots.go
new file mode 100644
index 000000000..3ed8282f5
--- /dev/null
+++ b/internal/api/robots.go
@@ -0,0 +1,52 @@
+// GoToSocial
+// Copyright (C) GoToSocial Authors admin@gotosocial.org
+// SPDX-License-Identifier: AGPL-3.0-or-later
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+package api
+
+import (
+	"github.com/gin-gonic/gin"
+	"github.com/superseriousbusiness/gotosocial/internal/api/robots"
+	"github.com/superseriousbusiness/gotosocial/internal/middleware"
+	"github.com/superseriousbusiness/gotosocial/internal/router"
+)
+
+type Robots struct {
+	robots *robots.Module
+}
+
+func (rb *Robots) Route(r *router.Router, m ...gin.HandlerFunc) {
+	// Create a group so we can attach middlewares.
+	robotsGroup := r.AttachGroup("robots.txt")
+
+	// Use passed-in middlewares.
+	robotsGroup.Use(m...)
+
+	// Allow caching for 24 hrs.
+	// https://www.rfc-editor.org/rfc/rfc9309.html#section-2.4
+	robotsGroup.Use(
+		middleware.CacheControl(middleware.CacheControlConfig{
+			Directives: []string{"public", "max-age=86400"},
+			Vary:       []string{"Accept-Encoding"},
+		}),
+	)
+
+	rb.robots.Route(robotsGroup.Handle)
+}
+
+func NewRobots() *Robots {
+	return &Robots{}
+}
diff --git a/internal/api/robots/robots.go b/internal/api/robots/robots.go
new file mode 100644
index 000000000..98db4682d
--- /dev/null
+++ b/internal/api/robots/robots.go
@@ -0,0 +1,57 @@
+// GoToSocial
+// Copyright (C) GoToSocial Authors admin@gotosocial.org
+// SPDX-License-Identifier: AGPL-3.0-or-later
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+package robots
+
+import (
+	"net/http"
+
+	"github.com/gin-gonic/gin"
+	apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
+	"github.com/superseriousbusiness/gotosocial/internal/config"
+)
+
+type Module struct{}
+
+func New() *Module {
+	return &Module{}
+}
+
+func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
+	// Serve different robots.txt file depending on instance
+	// stats mode: Don't disallow scraping nodeinfo if admin
+	// has opted in to serving accurate stats there. In all
+	// other cases, disallow scraping nodeinfo.
+	var handler gin.HandlerFunc
+	if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
+		handler = m.robotsGETHandler
+	} else {
+		handler = m.robotsGETHandlerDisallowNodeInfo
+	}
+
+	// Attach handler at empty path as this
+	// is already grouped under /robots.txt.
+	attachHandler(http.MethodGet, "", handler)
+}
+
+func (m *Module) robotsGETHandler(c *gin.Context) {
+	c.String(http.StatusOK, apiutil.RobotsTxt)
+}
+
+func (m *Module) robotsGETHandlerDisallowNodeInfo(c *gin.Context) {
+	c.String(http.StatusOK, apiutil.RobotsTxtDisallowNodeInfo)
+}
diff --git a/internal/api/util/robots.go b/internal/api/util/robots.go
new file mode 100644
index 000000000..49fb04561
--- /dev/null
+++ b/internal/api/util/robots.go
@@ -0,0 +1,133 @@
+// GoToSocial
+// Copyright (C) GoToSocial Authors admin@gotosocial.org
+// SPDX-License-Identifier: AGPL-3.0-or-later
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+package util
+
+// See:
+//
+// - https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag#robotsmeta
+// - https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Robots-Tag
+// - https://www.rfc-editor.org/rfc/rfc9309.html
+const (
+	RobotsDirectivesDisallow  = "noindex, nofollow"
+	RobotsDirectivesAllowSome = "nofollow, noarchive, nositelinkssearchbox, max-image-preview:standard"
+	RobotsTxt                 = `# GoToSocial robots.txt -- to edit, see internal/api/util/robots.go
+# More info @ https://developers.google.com/search/docs/crawling-indexing/robots/intro
+
+# AI scrapers and the like.
+# https://github.com/ai-robots-txt/ai.robots.txt/
+User-agent: AI2Bot
+User-agent: Ai2Bot-Dolma
+User-agent: Amazonbot
+User-agent: anthropic-ai
+User-agent: Applebot
+User-agent: Applebot-Extended
+User-agent: Bytespider
+User-agent: CCBot
+User-agent: ChatGPT-User
+User-agent: ClaudeBot
+User-agent: Claude-Web
+User-agent: cohere-ai
+User-agent: cohere-training-data-crawler
+User-agent: Diffbot
+User-agent: DuckAssistBot
+User-agent: FacebookBot
+User-agent: FriendlyCrawler
+User-agent: Google-Extended
+User-agent: GoogleOther
+User-agent: GoogleOther-Image
+User-agent: GoogleOther-Video
+User-agent: GPTBot
+User-agent: iaskspider/2.0
+User-agent: ICC-Crawler
+User-agent: ImagesiftBot
+User-agent: img2dataset
+User-agent: ISSCyberRiskCrawler
+User-agent: Kangaroo Bot
+User-agent: Meta-ExternalAgent
+User-agent: Meta-ExternalFetcher
+User-agent: OAI-SearchBot
+User-agent: omgili
+User-agent: omgilibot
+User-agent: PanguBot
+User-agent: PerplexityBot
+User-agent: PetalBot
+User-agent: Scrapy
+User-agent: Sidetrade indexer bot
+User-agent: Timpibot
+User-agent: VelenPublicWebCrawler
+User-agent: Webzio-Extended
+User-agent: YouBot
+Disallow: /
+
+# Marketing/SEO "intelligence" data scrapers
+User-agent: AwarioRssBot
+User-agent: AwarioSmartBot
+User-agent: DataForSeoBot
+User-agent: magpie-crawler
+User-agent: Meltwater
+User-agent: peer39_crawler
+User-agent: peer39_crawler/1.0
+User-agent: PiplBot
+User-agent: scoop.it
+User-agent: Seekr
+Disallow: /
+
+# Well-known.dev crawler. Indexes stuff under /.well-known.
+# https://well-known.dev/about/
+User-agent: WellKnownBot
+Disallow: /
+
+# Rules for everything else.
+User-agent: *
+Crawl-delay: 500
+
+# API endpoints.
+Disallow: /api/
+
+# Auth/Sign in endpoints.
+Disallow: /auth/
+Disallow: /oauth/
+Disallow: /check_your_email
+Disallow: /wait_for_approval
+Disallow: /account_disabled
+Disallow: /signup
+
+# Fileserver/media.
+Disallow: /fileserver/
+
+# Fedi S2S API endpoints.
+Disallow: /users/
+Disallow: /emoji/
+
+# Settings panels.
+Disallow: /admin
+Disallow: /user
+Disallow: /settings/
+
+# Domain blocklist.
+Disallow: /about/suspended
+
+# Webfinger endpoint.
+Disallow: /.well-known/webfinger
+`
+	RobotsTxtDisallowNodeInfo = RobotsTxt + `
+# Disallow nodeinfo
+Disallow: /.well-known/nodeinfo
+Disallow: /nodeinfo/
+`
+)
diff --git a/internal/api/wellknown/hostmeta/hostmeta.go b/internal/api/wellknown/hostmeta/hostmeta.go
index cb439fcd3..43c6b161e 100644
--- a/internal/api/wellknown/hostmeta/hostmeta.go
+++ b/internal/api/wellknown/hostmeta/hostmeta.go
@@ -21,6 +21,7 @@ import (
 	"net/http"
 
 	"github.com/gin-gonic/gin"
+	"github.com/superseriousbusiness/gotosocial/internal/middleware"
 	"github.com/superseriousbusiness/gotosocial/internal/processing"
 )
 
@@ -40,5 +41,6 @@ func New(processor *processing.Processor) *Module {
 }
 
 func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
-	attachHandler(http.MethodGet, HostMetaPath, m.HostMetaGETHandler)
+	// Attach handler, injecting robots http header middleware to disallow all.
+	attachHandler(http.MethodGet, HostMetaPath, middleware.RobotsHeaders(""), m.HostMetaGETHandler)
 }
diff --git a/internal/api/wellknown/nodeinfo/nodeinfo.go b/internal/api/wellknown/nodeinfo/nodeinfo.go
index 9012006f4..270dde2b1 100644
--- a/internal/api/wellknown/nodeinfo/nodeinfo.go
+++ b/internal/api/wellknown/nodeinfo/nodeinfo.go
@@ -21,6 +21,10 @@ import (
 	"net/http"
 
 	"github.com/gin-gonic/gin"
+	apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
+	"github.com/superseriousbusiness/gotosocial/internal/config"
+	"github.com/superseriousbusiness/gotosocial/internal/gtserror"
+	"github.com/superseriousbusiness/gotosocial/internal/middleware"
 	"github.com/superseriousbusiness/gotosocial/internal/processing"
 )
 
@@ -42,5 +46,57 @@ func New(processor *processing.Processor) *Module {
 }
 
 func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
-	attachHandler(http.MethodGet, NodeInfoWellKnownPath, m.NodeInfoWellKnownGETHandler)
+	// If instance is configured to serve instance stats
+	// faithfully at nodeinfo, we should allow robots to
+	// crawl nodeinfo endpoints in a limited capacity.
+	// In all other cases, disallow everything.
+	var robots gin.HandlerFunc
+	if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
+		robots = middleware.RobotsHeaders("allowSome")
+	} else {
+		robots = middleware.RobotsHeaders("")
+	}
+
+	// Attach handler, injecting robots http header middleware.
+	attachHandler(http.MethodGet, NodeInfoWellKnownPath, robots, m.NodeInfoWellKnownGETHandler)
+}
+
+// NodeInfoWellKnownGETHandler swagger:operation GET /.well-known/nodeinfo nodeInfoWellKnownGet
+//
+// Returns a well-known response which redirects callers to `/nodeinfo/2.0`.
+//
+// eg. `{"links":[{"rel":"http://nodeinfo.diaspora.software/ns/schema/2.0","href":"http://example.org/nodeinfo/2.0"}]}`
+// See: https://nodeinfo.diaspora.software/protocol.html
+//
+// ---
+// tags:
+// - .well-known
+//
+// produces:
+// - application/json
+//
+// responses:
+//   '200':
+//     schema:
+//       "$ref": "#/definitions/wellKnownResponse"
+func (m *Module) NodeInfoWellKnownGETHandler(c *gin.Context) {
+	if _, err := apiutil.NegotiateAccept(c, apiutil.JSONAcceptHeaders...); err != nil {
+		apiutil.ErrorHandler(c, gtserror.NewErrorNotAcceptable(err, err.Error()), m.processor.InstanceGetV1)
+		return
+	}
+
+	resp, errWithCode := m.processor.Fedi().NodeInfoRelGet(c.Request.Context())
+	if errWithCode != nil {
+		apiutil.ErrorHandler(c, errWithCode, m.processor.InstanceGetV1)
+		return
+	}
+
+	// Encode JSON HTTP response.
+	apiutil.EncodeJSONResponse(
+		c.Writer,
+		c.Request,
+		http.StatusOK,
+		apiutil.AppJSON,
+		resp,
+	)
+}
diff --git a/internal/api/wellknown/nodeinfo/nodeinfoget.go b/internal/api/wellknown/nodeinfo/nodeinfoget.go
deleted file mode 100644
index c458f131e..000000000
--- a/internal/api/wellknown/nodeinfo/nodeinfoget.go
+++ /dev/null
@@ -1,66 +0,0 @@
-// GoToSocial
-// Copyright (C) GoToSocial Authors admin@gotosocial.org
-// SPDX-License-Identifier: AGPL-3.0-or-later
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-//
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-package nodeinfo
-
-import (
-	"net/http"
-
-	"github.com/gin-gonic/gin"
-	apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
-	"github.com/superseriousbusiness/gotosocial/internal/gtserror"
-)
-
-// NodeInfoWellKnownGETHandler swagger:operation GET /.well-known/nodeinfo nodeInfoWellKnownGet
-//
-// Returns a well-known response which redirects callers to `/nodeinfo/2.0`.
-//
-// eg. `{"links":[{"rel":"http://nodeinfo.diaspora.software/ns/schema/2.0","href":"http://example.org/nodeinfo/2.0"}]}`
-// See: https://nodeinfo.diaspora.software/protocol.html
-//
-// ---
-// tags:
-// - .well-known
-//
-// produces:
-// - application/json
-//
-// responses:
-//   '200':
-//     schema:
-//       "$ref": "#/definitions/wellKnownResponse"
-func (m *Module) NodeInfoWellKnownGETHandler(c *gin.Context) {
-	if _, err := apiutil.NegotiateAccept(c, apiutil.JSONAcceptHeaders...); err != nil {
-		apiutil.ErrorHandler(c, gtserror.NewErrorNotAcceptable(err, err.Error()), m.processor.InstanceGetV1)
-		return
-	}
-
-	resp, errWithCode := m.processor.Fedi().NodeInfoRelGet(c.Request.Context())
-	if errWithCode != nil {
-		apiutil.ErrorHandler(c, errWithCode, m.processor.InstanceGetV1)
-		return
-	}
-
-	// Encode JSON HTTP response.
-	apiutil.EncodeJSONResponse(
-		c.Writer,
-		c.Request,
-		http.StatusOK,
-		apiutil.AppJSON,
-		resp,
-	)
-}
diff --git a/internal/api/wellknown/webfinger/webfinger.go b/internal/api/wellknown/webfinger/webfinger.go
index a50013b32..c70afab9d 100644
--- a/internal/api/wellknown/webfinger/webfinger.go
+++ b/internal/api/wellknown/webfinger/webfinger.go
@@ -21,6 +21,7 @@ import (
 	"net/http"
 
 	"github.com/gin-gonic/gin"
+	"github.com/superseriousbusiness/gotosocial/internal/middleware"
 	"github.com/superseriousbusiness/gotosocial/internal/processing"
 )
 
@@ -41,5 +42,6 @@ func New(processor *processing.Processor) *Module {
 }
 
 func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
-	attachHandler(http.MethodGet, WebfingerBasePath, m.WebfingerGETRequest)
+	// Attach handler, injecting robots http header middleware to disallow all.
+	attachHandler(http.MethodGet, WebfingerBasePath, middleware.RobotsHeaders(""), m.WebfingerGETRequest)
 }
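
To illustrate the end result, here is a hedged `httptest`-style sketch (it builds on the hypothetical `RobotsHeaders` sketch above, not on code from this diff) showing the `X-Robots-Tag` values one would expect on the webfinger route versus the well-known nodeinfo route when instance stats are served:

```go
package middleware_test

import (
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/gin-gonic/gin"
	"github.com/superseriousbusiness/gotosocial/internal/middleware"
)

// TestRobotsHeaders is an illustrative sketch: it checks that the two
// middleware modes emit the X-Robots-Tag directives defined in
// internal/api/util/robots.go.
func TestRobotsHeaders(t *testing.T) {
	gin.SetMode(gin.TestMode)

	r := gin.New()
	// "" mode: disallow everything (as used for webfinger/host-meta).
	r.GET("/.well-known/webfinger", middleware.RobotsHeaders(""), func(c *gin.Context) {
		c.Status(http.StatusOK)
	})
	// "allowSome" mode: limited crawling (as used for nodeinfo when
	// instance-stats-mode is set to serve).
	r.GET("/.well-known/nodeinfo", middleware.RobotsHeaders("allowSome"), func(c *gin.Context) {
		c.Status(http.StatusOK)
	})

	tests := map[string]string{
		"/.well-known/webfinger": "noindex, nofollow",
		"/.well-known/nodeinfo":  "nofollow, noarchive, nositelinkssearchbox, max-image-preview:standard",
	}
	for path, want := range tests {
		rec := httptest.NewRecorder()
		req := httptest.NewRequest(http.MethodGet, path, nil)
		r.ServeHTTP(rec, req)
		if got := rec.Header().Get("X-Robots-Tag"); got != want {
			t.Errorf("%s: got %q, want %q", path, got, want)
		}
	}
}
```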