From baed591a1d19942ec553baed41a8048ab9dd18ca Mon Sep 17 00:00:00 2001
From: tobi <31960611+tsmethurst@users.noreply.github.com>
Date: Wed, 5 Feb 2025 12:47:13 +0100
Subject: [feature] Use `X-Robots-Tag` headers to instruct scrapers/crawlers
 (#3737)

* [feature] Use `X-Robots-Tag` headers to instruct scrapers/crawlers

* use switch for RobotsHeaders
---
 internal/api/nodeinfo.go | 11 +++++++++++
 1 file changed, 11 insertions(+)

(limited to 'internal/api/nodeinfo.go')

diff --git a/internal/api/nodeinfo.go b/internal/api/nodeinfo.go
index 29942aba4..2f0c234fd 100644
--- a/internal/api/nodeinfo.go
+++ b/internal/api/nodeinfo.go
@@ -20,6 +20,7 @@ package api
 import (
 	"github.com/gin-gonic/gin"
 	"github.com/superseriousbusiness/gotosocial/internal/api/nodeinfo"
+	"github.com/superseriousbusiness/gotosocial/internal/config"
 	"github.com/superseriousbusiness/gotosocial/internal/middleware"
 	"github.com/superseriousbusiness/gotosocial/internal/processing"
 	"github.com/superseriousbusiness/gotosocial/internal/router"
@@ -43,6 +44,16 @@ func (w *NodeInfo) Route(r *router.Router, m ...gin.HandlerFunc) {
 		}),
 	)
 
+	// If instance is configured to serve instance stats
+	// faithfully at nodeinfo, we should allow robots to
+	// crawl nodeinfo endpoints in a limited capacity.
+	// In all other cases, disallow everything.
+	if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
+		nodeInfoGroup.Use(middleware.RobotsHeaders("allowSome"))
+	} else {
+		nodeInfoGroup.Use(middleware.RobotsHeaders(""))
+	}
+
 	w.nodeInfo.Route(nodeInfoGroup.Handle)
 }
-- 
cgit v1.2.3
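
Note: for context, below is a minimal sketch of how a gin middleware along the lines of RobotsHeaders could set the X-Robots-Tag response header. The function name RobotsHeadersSketch and the exact directive strings are illustrative assumptions only; the real middleware.RobotsHeaders in GoToSocial may use different modes and directives.

package middleware

import "github.com/gin-gonic/gin"

// RobotsHeadersSketch is a hypothetical stand-in for the RobotsHeaders
// middleware referenced in the diff above. It sets an X-Robots-Tag header
// on every response so that well-behaved scrapers/crawlers know how to
// treat the endpoint.
func RobotsHeadersSketch(mode string) gin.HandlerFunc {
	return func(c *gin.Context) {
		switch mode {
		case "allowSome":
			// Permit limited crawling: the page may be indexed, but
			// ask crawlers not to archive it or index its images.
			// (Directive set chosen for illustration only.)
			c.Header("X-Robots-Tag", "noarchive, noimageindex")
		default:
			// Disallow everything.
			c.Header("X-Robots-Tag", "noindex, nofollow")
		}
		c.Next()
	}
}

As in the diff, such a middleware would be attached to a router group, e.g. nodeInfoGroup.Use(RobotsHeadersSketch("allowSome")), so every handler registered under that group inherits the header.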