Mirror of https://github.com/superseriousbusiness/gotosocial.git, synced 2025-10-29 17:02:25 -05:00
[feature] Use X-Robots-Tag headers to instruct scrapers/crawlers (#3737)
* [feature] Use `X-Robots-Tag` headers to instruct scrapers/crawlers
* use switch for RobotsHeaders
parent bfb81f5bac
commit baed591a1d
15 changed files with 311 additions and 142 deletions
@@ -20,6 +20,7 @@ package api
 import (
 	"github.com/gin-gonic/gin"
 	"github.com/superseriousbusiness/gotosocial/internal/api/nodeinfo"
+	"github.com/superseriousbusiness/gotosocial/internal/config"
 	"github.com/superseriousbusiness/gotosocial/internal/middleware"
 	"github.com/superseriousbusiness/gotosocial/internal/processing"
 	"github.com/superseriousbusiness/gotosocial/internal/router"
@@ -43,6 +44,16 @@ func (w *NodeInfo) Route(r *router.Router, m ...gin.HandlerFunc) {
 		}),
 	)
 
+	// If instance is configured to serve instance stats
+	// faithfully at nodeinfo, we should allow robots to
+	// crawl nodeinfo endpoints in a limited capacity.
+	// In all other cases, disallow everything.
+	if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
+		nodeInfoGroup.Use(middleware.RobotsHeaders("allowSome"))
+	} else {
+		nodeInfoGroup.Use(middleware.RobotsHeaders(""))
+	}
+
 	w.nodeInfo.Route(nodeInfoGroup.Handle)
 }
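The `middleware.RobotsHeaders` constructor used above is added elsewhere in this commit and is not part of the hunks shown here. Going only by its call sites (a mode string of "allowSome" for limited crawling, an empty string for disallow-everything) and the commit note "use switch for RobotsHeaders", a minimal sketch of such a gin middleware might look like the following; the exact X-Robots-Tag directive strings are assumptions for illustration, not the values used in the commit.

package middleware

import "github.com/gin-gonic/gin"

// RobotsHeaders returns gin middleware that sets an X-Robots-Tag header
// on every response passing through it, switching on the given mode.
//
// NOTE: this is a hedged sketch based on the call sites in the diff above;
// the directive strings below are assumptions, not the commit's actual values.
func RobotsHeaders(mode string) gin.HandlerFunc {
	return func(c *gin.Context) {
		switch mode {
		case "allowSome":
			// Permit limited crawling of these endpoints: forbid link
			// following and cached previews, but allow basic indexing.
			c.Header("X-Robots-Tag", "nofollow, noarchive")
		default:
			// Disallow everything.
			c.Header("X-Robots-Tag", "none, noarchive, noimageindex")
		}
		c.Next()
	}
}

With a middleware of this shape, the nodeinfo group above gets limited crawling only when the instance is configured to serve its stats faithfully, and a blanket disallow in all other cases.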