[feature] Use X-Robots-Tag headers to instruct scrapers/crawlers (#3737)

* [feature] Use `X-Robots-Tag` headers to instruct scrapers/crawlers

* use switch for RobotsHeaders
tobi 2025-02-05 12:47:13 +01:00 committed by GitHub
commit baed591a1d
GPG key ID: B5690EEEBB952194
15 changed files with 311 additions and 142 deletions

@@ -20,6 +20,7 @@ package api
import (
"github.com/gin-gonic/gin"
"github.com/superseriousbusiness/gotosocial/internal/api/nodeinfo"
"github.com/superseriousbusiness/gotosocial/internal/config"
"github.com/superseriousbusiness/gotosocial/internal/middleware"
"github.com/superseriousbusiness/gotosocial/internal/processing"
"github.com/superseriousbusiness/gotosocial/internal/router"
@@ -43,6 +44,16 @@ func (w *NodeInfo) Route(r *router.Router, m ...gin.HandlerFunc) {
}),
)
// If instance is configured to serve instance stats
// faithfully at nodeinfo, we should allow robots to
// crawl nodeinfo endpoints in a limited capacity.
// In all other cases, disallow everything.
if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
nodeInfoGroup.Use(middleware.RobotsHeaders("allowSome"))
} else {
nodeInfoGroup.Use(middleware.RobotsHeaders(""))
}
w.nodeInfo.Route(nodeInfoGroup.Handle)
}
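
The middleware.RobotsHeaders function used above is not shown in this excerpt. As a minimal sketch (an assumption, not the actual internal/middleware implementation), it might switch on the policy string, per the "use switch for RobotsHeaders" commit message, and write an X-Robots-Tag header using the directive constants added in internal/api/util/robots.go further down:

package middleware

import (
	"github.com/gin-gonic/gin"

	apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
)

// RobotsHeaders returns middleware that sets an X-Robots-Tag header
// on each response. Passing "allowSome" permits limited indexing;
// any other value (including "") disallows indexing and following.
// Sketch only: the real policy names and behaviour may differ.
func RobotsHeaders(policy string) gin.HandlerFunc {
	switch policy {
	case "allowSome":
		return func(c *gin.Context) {
			c.Writer.Header().Set("X-Robots-Tag", apiutil.RobotsDirectivesAllowSome)
		}
	default:
		return func(c *gin.Context) {
			c.Writer.Header().Set("X-Robots-Tag", apiutil.RobotsDirectivesDisallow)
		}
	}
}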

internal/api/robots.go (new file, 52 lines)

@@ -0,0 +1,52 @@
// GoToSocial
// Copyright (C) GoToSocial Authors admin@gotosocial.org
// SPDX-License-Identifier: AGPL-3.0-or-later
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package api
import (
"github.com/gin-gonic/gin"
"github.com/superseriousbusiness/gotosocial/internal/api/robots"
"github.com/superseriousbusiness/gotosocial/internal/middleware"
"github.com/superseriousbusiness/gotosocial/internal/router"
)
type Robots struct {
robots *robots.Module
}
func (rb *Robots) Route(r *router.Router, m ...gin.HandlerFunc) {
// Create a group so we can attach middlewares.
robotsGroup := r.AttachGroup("robots.txt")
// Use passed-in middlewares.
robotsGroup.Use(m...)
// Allow caching for 24 hrs.
// https://www.rfc-editor.org/rfc/rfc9309.html#section-2.4
robotsGroup.Use(
middleware.CacheControl(middleware.CacheControlConfig{
Directives: []string{"public", "max-age=86400"},
Vary: []string{"Accept-Encoding"},
}),
)
rb.robots.Route(robotsGroup.Handle)
}
func NewRobots() *Robots {
return &Robots{}
}
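
As a usage sketch (a hypothetical test, not part of this commit), the group wiring above can be exercised with gin's test mode and httptest to confirm that the Cache-Control directives are applied to /robots.txt responses; the plain gin group here stands in for the router.AttachGroup call used in the real code:

package robots_test

import (
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"

	"github.com/gin-gonic/gin"
	"github.com/superseriousbusiness/gotosocial/internal/middleware"
)

func TestRobotsTxtCaching(t *testing.T) {
	gin.SetMode(gin.TestMode)
	engine := gin.New()

	// Mirror the wiring above: a robots.txt group with
	// 24h public caching, serving a stub body.
	group := engine.Group("robots.txt")
	group.Use(middleware.CacheControl(middleware.CacheControlConfig{
		Directives: []string{"public", "max-age=86400"},
		Vary:       []string{"Accept-Encoding"},
	}))
	group.GET("", func(c *gin.Context) {
		c.String(http.StatusOK, "User-agent: *\nDisallow: /api/\n")
	})

	rec := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/robots.txt", nil)
	engine.ServeHTTP(rec, req)

	// Assumes the middleware joins the directives into the
	// Cache-Control header; only the max-age value is checked.
	if cc := rec.Header().Get("Cache-Control"); !strings.Contains(cc, "max-age=86400") {
		t.Fatalf("unexpected Cache-Control: %q", cc)
	}
}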

@@ -0,0 +1,57 @@
// GoToSocial
// Copyright (C) GoToSocial Authors admin@gotosocial.org
// SPDX-License-Identifier: AGPL-3.0-or-later
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package robots
import (
"net/http"
"github.com/gin-gonic/gin"
apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
"github.com/superseriousbusiness/gotosocial/internal/config"
)
type Module struct{}
func New() *Module {
return &Module{}
}
func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
// Serve different robots.txt file depending on instance
// stats mode: Don't disallow scraping nodeinfo if admin
// has opted in to serving accurate stats there. In all
// other cases, disallow scraping nodeinfo.
var handler gin.HandlerFunc
if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
handler = m.robotsGETHandler
} else {
handler = m.robotsGETHandlerDisallowNodeInfo
}
// Attach handler at empty path as this
// is already grouped under /robots.txt.
attachHandler(http.MethodGet, "", handler)
}
func (m *Module) robotsGETHandler(c *gin.Context) {
c.String(http.StatusOK, apiutil.RobotsTxt)
}
func (m *Module) robotsGETHandlerDisallowNodeInfo(c *gin.Context) {
c.String(http.StatusOK, apiutil.RobotsTxtDisallowNodeInfo)
}

internal/api/util/robots.go (new file, 133 lines)

@@ -0,0 +1,133 @@
// GoToSocial
// Copyright (C) GoToSocial Authors admin@gotosocial.org
// SPDX-License-Identifier: AGPL-3.0-or-later
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package util
// See:
//
// - https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag#robotsmeta
// - https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Robots-Tag
// - https://www.rfc-editor.org/rfc/rfc9309.html
const (
RobotsDirectivesDisallow = "noindex, nofollow"
RobotsDirectivesAllowSome = "nofollow, noarchive, nositelinkssearchbox, max-image-preview:standard"
RobotsTxt = `# GoToSocial robots.txt -- to edit, see internal/api/util/robots.go
# More info @ https://developers.google.com/search/docs/crawling-indexing/robots/intro
# AI scrapers and the like.
# https://github.com/ai-robots-txt/ai.robots.txt/
User-agent: AI2Bot
User-agent: Ai2Bot-Dolma
User-agent: Amazonbot
User-agent: anthropic-ai
User-agent: Applebot
User-agent: Applebot-Extended
User-agent: Bytespider
User-agent: CCBot
User-agent: ChatGPT-User
User-agent: ClaudeBot
User-agent: Claude-Web
User-agent: cohere-ai
User-agent: cohere-training-data-crawler
User-agent: Diffbot
User-agent: DuckAssistBot
User-agent: FacebookBot
User-agent: FriendlyCrawler
User-agent: Google-Extended
User-agent: GoogleOther
User-agent: GoogleOther-Image
User-agent: GoogleOther-Video
User-agent: GPTBot
User-agent: iaskspider/2.0
User-agent: ICC-Crawler
User-agent: ImagesiftBot
User-agent: img2dataset
User-agent: ISSCyberRiskCrawler
User-agent: Kangaroo Bot
User-agent: Meta-ExternalAgent
User-agent: Meta-ExternalFetcher
User-agent: OAI-SearchBot
User-agent: omgili
User-agent: omgilibot
User-agent: PanguBot
User-agent: PerplexityBot
User-agent: PetalBot
User-agent: Scrapy
User-agent: Sidetrade indexer bot
User-agent: Timpibot
User-agent: VelenPublicWebCrawler
User-agent: Webzio-Extended
User-agent: YouBot
Disallow: /
# Marketing/SEO "intelligence" data scrapers
User-agent: AwarioRssBot
User-agent: AwarioSmartBot
User-agent: DataForSeoBot
User-agent: magpie-crawler
User-agent: Meltwater
User-agent: peer39_crawler
User-agent: peer39_crawler/1.0
User-agent: PiplBot
User-agent: scoop.it
User-agent: Seekr
Disallow: /
# Well-known.dev crawler. Indexes stuff under /.well-known.
# https://well-known.dev/about/
User-agent: WellKnownBot
Disallow: /
# Rules for everything else.
User-agent: *
Crawl-delay: 500
# API endpoints.
Disallow: /api/
# Auth/Sign in endpoints.
Disallow: /auth/
Disallow: /oauth/
Disallow: /check_your_email
Disallow: /wait_for_approval
Disallow: /account_disabled
Disallow: /signup
# Fileserver/media.
Disallow: /fileserver/
# Fedi S2S API endpoints.
Disallow: /users/
Disallow: /emoji/
# Settings panels.
Disallow: /admin
Disallow: /user
Disallow: /settings/
# Domain blocklist.
Disallow: /about/suspended
# Webfinger endpoint.
Disallow: /.well-known/webfinger
`
RobotsTxtDisallowNodeInfo = RobotsTxt + `
# Disallow nodeinfo
Disallow: /.well-known/nodeinfo
Disallow: /nodeinfo/
`
)
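
For illustration only (example.org is a placeholder instance), a crawler-side check of where these directives end up: /robots.txt governs path-level crawling, while the X-Robots-Tag response header set by the RobotsHeaders middleware carries the per-response directives above:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Depending on the instance's stats mode, this prints either the
	// "disallow" or the "allowSome" directive string defined above.
	resp, err := http.Get("https://example.org/.well-known/nodeinfo")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("X-Robots-Tag:", resp.Header.Get("X-Robots-Tag"))
}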

@@ -21,6 +21,7 @@ import (
"net/http"
"github.com/gin-gonic/gin"
"github.com/superseriousbusiness/gotosocial/internal/middleware"
"github.com/superseriousbusiness/gotosocial/internal/processing"
)
@@ -40,5 +41,6 @@ func New(processor *processing.Processor) *Module {
}
func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
attachHandler(http.MethodGet, HostMetaPath, m.HostMetaGETHandler)
// Attach handler, injecting robots http header middleware to disallow all.
attachHandler(http.MethodGet, HostMetaPath, middleware.RobotsHeaders(""), m.HostMetaGETHandler)
}

@@ -21,6 +21,10 @@ import (
"net/http"
"github.com/gin-gonic/gin"
apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
"github.com/superseriousbusiness/gotosocial/internal/config"
"github.com/superseriousbusiness/gotosocial/internal/gtserror"
"github.com/superseriousbusiness/gotosocial/internal/middleware"
"github.com/superseriousbusiness/gotosocial/internal/processing"
)
@@ -42,5 +46,57 @@ func New(processor *processing.Processor) *Module {
}
func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
attachHandler(http.MethodGet, NodeInfoWellKnownPath, m.NodeInfoWellKnownGETHandler)
// If instance is configured to serve instance stats
// faithfully at nodeinfo, we should allow robots to
// crawl nodeinfo endpoints in a limited capacity.
// In all other cases, disallow everything.
var robots gin.HandlerFunc
if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
robots = middleware.RobotsHeaders("allowSome")
} else {
robots = middleware.RobotsHeaders("")
}
// Attach handler, injecting robots http header middleware.
attachHandler(http.MethodGet, NodeInfoWellKnownPath, robots, m.NodeInfoWellKnownGETHandler)
}
// NodeInfoWellKnownGETHandler swagger:operation GET /.well-known/nodeinfo nodeInfoWellKnownGet
//
// Returns a well-known response which redirects callers to `/nodeinfo/2.0`.
//
// eg. `{"links":[{"rel":"http://nodeinfo.diaspora.software/ns/schema/2.0","href":"http://example.org/nodeinfo/2.0"}]}`
// See: https://nodeinfo.diaspora.software/protocol.html
//
// ---
// tags:
// - .well-known
//
// produces:
// - application/json
//
// responses:
// '200':
// schema:
// "$ref": "#/definitions/wellKnownResponse"
func (m *Module) NodeInfoWellKnownGETHandler(c *gin.Context) {
if _, err := apiutil.NegotiateAccept(c, apiutil.JSONAcceptHeaders...); err != nil {
apiutil.ErrorHandler(c, gtserror.NewErrorNotAcceptable(err, err.Error()), m.processor.InstanceGetV1)
return
}
resp, errWithCode := m.processor.Fedi().NodeInfoRelGet(c.Request.Context())
if errWithCode != nil {
apiutil.ErrorHandler(c, errWithCode, m.processor.InstanceGetV1)
return
}
// Encode JSON HTTP response.
apiutil.EncodeJSONResponse(
c.Writer,
c.Request,
http.StatusOK,
apiutil.AppJSON,
resp,
)
}

@@ -1,66 +0,0 @@
// GoToSocial
// Copyright (C) GoToSocial Authors admin@gotosocial.org
// SPDX-License-Identifier: AGPL-3.0-or-later
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package nodeinfo
import (
"net/http"
"github.com/gin-gonic/gin"
apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
"github.com/superseriousbusiness/gotosocial/internal/gtserror"
)
// NodeInfoWellKnownGETHandler swagger:operation GET /.well-known/nodeinfo nodeInfoWellKnownGet
//
// Returns a well-known response which redirects callers to `/nodeinfo/2.0`.
//
// eg. `{"links":[{"rel":"http://nodeinfo.diaspora.software/ns/schema/2.0","href":"http://example.org/nodeinfo/2.0"}]}`
// See: https://nodeinfo.diaspora.software/protocol.html
//
// ---
// tags:
// - .well-known
//
// produces:
// - application/json
//
// responses:
// '200':
// schema:
// "$ref": "#/definitions/wellKnownResponse"
func (m *Module) NodeInfoWellKnownGETHandler(c *gin.Context) {
if _, err := apiutil.NegotiateAccept(c, apiutil.JSONAcceptHeaders...); err != nil {
apiutil.ErrorHandler(c, gtserror.NewErrorNotAcceptable(err, err.Error()), m.processor.InstanceGetV1)
return
}
resp, errWithCode := m.processor.Fedi().NodeInfoRelGet(c.Request.Context())
if errWithCode != nil {
apiutil.ErrorHandler(c, errWithCode, m.processor.InstanceGetV1)
return
}
// Encode JSON HTTP response.
apiutil.EncodeJSONResponse(
c.Writer,
c.Request,
http.StatusOK,
apiutil.AppJSON,
resp,
)
}

@@ -21,6 +21,7 @@ import (
"net/http"
"github.com/gin-gonic/gin"
"github.com/superseriousbusiness/gotosocial/internal/middleware"
"github.com/superseriousbusiness/gotosocial/internal/processing"
)
@@ -41,5 +42,6 @@ func New(processor *processing.Processor) *Module {
}
func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
attachHandler(http.MethodGet, WebfingerBasePath, m.WebfingerGETRequest)
// Attach handler, injecting robots http header middleware to disallow all.
attachHandler(http.MethodGet, WebfingerBasePath, middleware.RobotsHeaders(""), m.WebfingerGETRequest)
}