From 0306fa46f8c4dfa68ce6bbcfb2fb1cc460794002 Mon Sep 17 00:00:00 2001
From: alemi
Date: Fri, 7 Feb 2025 12:58:50 +0100
Subject: [PATCH] chore: go fmt ./...

---
 internal/transport/derefinstance.go | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/internal/transport/derefinstance.go b/internal/transport/derefinstance.go
index 2eb55cff1..2bc9205da 100644
--- a/internal/transport/derefinstance.go
+++ b/internal/transport/derefinstance.go
@@ -25,8 +25,8 @@ import (
 	"io"
 	"net/http"
 	"net/url"
-	"strings"
 	"slices"
+	"strings"
 
 	apimodel "github.com/superseriousbusiness/gotosocial/internal/api/model"
 	apiutil "github.com/superseriousbusiness/gotosocial/internal/api/util"
@@ -114,7 +114,7 @@ func dereferenceByAPIV1Instance(ctx context.Context, t *transport, iri *url.URL)
 
 	// Ensure that we can fetch this endpoint
 	if robots := resp.Header.Values("X-Robots-Tag"); slices.Contains(robots, "noindex") {
-		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it");
+		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it")
 		return nil, gtserror.SetNotPermitted(err)
 	}
 
@@ -276,7 +276,7 @@ func callNodeInfoWellKnown(ctx context.Context, t *transport, iri *url.URL) (*ur
 
 	// Ensure that we can fetch this endpoint
 	if robots := resp.Header.Values("X-Robots-Tag"); slices.Contains(robots, "noindex") {
-		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it");
+		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it")
 		return nil, gtserror.SetNotPermitted(err)
 	}
 
@@ -339,7 +339,7 @@ func callNodeInfo(ctx context.Context, t *transport, iri *url.URL) (*apimodel.No
 
 	// Ensure that we can fetch this endpoint
 	if robots := resp.Header.Values("X-Robots-Tag"); slices.Contains(robots, "noindex") {
-		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it");
+		err := gtserror.Newf("can't fetch this endpoint: robots tags disallows it")
 		return nil, gtserror.SetNotPermitted(err)
 	}