Mirror of https://github.com/superseriousbusiness/gotosocial.git (synced 2025-11-14 18:17:29 -06:00)
[feature] Use X-Robots-Tag headers to instruct scrapers/crawlers (#3737)
* [feature] Use `X-Robots-Tag` headers to instruct scrapers/crawlers
* use switch for RobotsHeaders
Parent: bfb81f5bac
Commit: baed591a1d

15 changed files with 311 additions and 142 deletions
Of the 15 changed files, only one module's route registration is shown in this excerpt:

@@ -21,6 +21,7 @@ import (
 	"net/http"

 	"github.com/gin-gonic/gin"
+	"github.com/superseriousbusiness/gotosocial/internal/middleware"
 	"github.com/superseriousbusiness/gotosocial/internal/processing"
 )

@@ -40,5 +41,6 @@ func New(processor *processing.Processor) *Module {
 }

 func (m *Module) Route(attachHandler func(method string, path string, f ...gin.HandlerFunc) gin.IRoutes) {
-	attachHandler(http.MethodGet, HostMetaPath, m.HostMetaGETHandler)
+	// Attach handler, injecting robots http header middleware to disallow all.
+	attachHandler(http.MethodGet, HostMetaPath, middleware.RobotsHeaders(""), m.HostMetaGETHandler)
 }