Mirror of https://github.com/superseriousbusiness/gotosocial.git, synced 2025-12-30 00:36:14 -06:00
feat: check X-Robots-Tag

When accessing the /api/v1/instance or /nodeinfo endpoints, respect the X-Robots-Tag response header.
This commit is contained in:
parent 00bd0f7658
commit f4f83f9f0e

1 changed file with 19 additions and 0 deletions
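As a rough illustration of the situation this change handles (this is a hypothetical remote server, not GoToSocial code; the port and JSON body are made up), a remote instance can mark its instance-metadata endpoint as off-limits to crawlers and indexers by sending the X-Robots-Tag response header:

package main

import (
	"log"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	// Hypothetical remote instance: it labels /api/v1/instance as
	// "noindex" via the X-Robots-Tag response header.
	mux.HandleFunc("/api/v1/instance", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("X-Robots-Tag", "noindex")
		w.Header().Set("Content-Type", "application/json")
		w.Write([]byte(`{"title":"example instance"}`))
	})
	log.Fatal(http.ListenAndServe(":8080", mux))
}

With this commit, GoToSocial's dereferencing code refuses to read such a response and returns a not-permitted error instead of parsing the body.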
@@ -26,6 +26,7 @@ import (
 	"net/http"
 	"net/url"
 	"strings"
+	"slices"
 
 	apimodel "github.com/superseriousbusiness/gotosocial/internal/api/model"
 	apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
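The new "slices" import supports the header check added in the hunks below. Worth noting, as a property of the Go standard library rather than a claim about this commit's intent: http.Header.Values returns one string per header line, and slices.Contains compares those strings exactly, so the check matches a header sent as its own line ("X-Robots-Tag: noindex") but not a token inside a combined value. A small standalone sketch:

package main

import (
	"fmt"
	"net/http"
	"slices"
)

func main() {
	// Each Add call models one X-Robots-Tag header line in a response.
	h := http.Header{}
	h.Add("X-Robots-Tag", "noindex")
	h.Add("X-Robots-Tag", "nofollow")
	fmt.Println(slices.Contains(h.Values("X-Robots-Tag"), "noindex")) // true

	// A single combined header line is one value, so an exact match fails.
	h2 := http.Header{}
	h2.Add("X-Robots-Tag", "noindex, nofollow")
	fmt.Println(slices.Contains(h2.Values("X-Robots-Tag"), "noindex")) // false
}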
@@ -111,6 +112,12 @@ func dereferenceByAPIV1Instance(ctx context.Context, t *transport, iri *url.URL)
 		return nil, gtserror.SetMalformed(err)
 	}
 
+	// Ensure that we can fetch this endpoint
+	if robots := resp.Header.Values("X-Robots-Tag"); slices.Contains(robots, "noindex") {
+		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it");
+		return nil, gtserror.SetNotPermitted(err)
+	}
+
 	b, err := io.ReadAll(resp.Body)
 	if err != nil {
 		return nil, err
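A minimal way to see the added guard in action outside GoToSocial (a hypothetical test double and a plain http.Get; the real code goes through the package's transport type and wraps the error with gtserror):

package main

import (
	"fmt"
	"log"
	"net/http"
	"net/http/httptest"
	"slices"
)

func main() {
	// Hypothetical test double standing in for a remote instance that
	// disallows indexing of its /api/v1/instance endpoint.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("X-Robots-Tag", "noindex")
		fmt.Fprint(w, `{}`)
	}))
	defer srv.Close()

	resp, err := http.Get(srv.URL + "/api/v1/instance")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// Same check the commit adds before reading the response body.
	if robots := resp.Header.Values("X-Robots-Tag"); slices.Contains(robots, "noindex") {
		fmt.Println("endpoint disallows fetching; skipping")
		return
	}
	fmt.Println("endpoint may be fetched")
}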
@@ -267,6 +274,12 @@ func callNodeInfoWellKnown(ctx context.Context, t *transport, iri *url.URL) (*ur
 		return nil, gtserror.SetMalformed(err)
 	}
 
+	// Ensure that we can fetch this endpoint
+	if robots := resp.Header.Values("X-Robots-Tag"); slices.Contains(robots, "noindex") {
+		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it");
+		return nil, gtserror.SetNotPermitted(err)
+	}
+
 	b, err := io.ReadAll(resp.Body)
 	if err != nil {
 		return nil, err
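For context, callNodeInfoWellKnown resolves the NodeInfo discovery document served at /.well-known/nodeinfo, which lists schema URIs and the href of the actual nodeinfo document. A rough sketch of that document's shape (illustrative struct names and example URLs, not GoToSocial's types):

package main

import (
	"encoding/json"
	"fmt"
)

// wellKnown models the /.well-known/nodeinfo discovery document.
type wellKnown struct {
	Links []struct {
		Rel  string `json:"rel"`
		Href string `json:"href"`
	} `json:"links"`
}

func main() {
	doc := []byte(`{"links":[{"rel":"http://nodeinfo.diaspora.software/ns/schema/2.0","href":"https://example.org/nodeinfo/2.0"}]}`)
	var wk wellKnown
	if err := json.Unmarshal(doc, &wk); err != nil {
		panic(err)
	}
	for _, l := range wk.Links {
		fmt.Println(l.Rel, "->", l.Href)
	}
}

With this commit, both this well-known lookup and the follow-up nodeinfo fetch below are abandoned early when the response carries X-Robots-Tag: noindex.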
@@ -324,6 +337,12 @@ func callNodeInfo(ctx context.Context, t *transport, iri *url.URL) (*apimodel.No
 		return nil, gtserror.SetMalformed(err)
 	}
 
+	// Ensure that we can fetch this endpoint
+	if robots := resp.Header.Values("X-Robots-Tag"); slices.Contains(robots, "noindex") {
+		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it");
+		return nil, gtserror.SetNotPermitted(err)
+	}
+
 	b, err := io.ReadAll(resp.Body)
 	if err != nil {
 		return nil, err