Mirror of https://github.com/superseriousbusiness/gotosocial.git (synced 2025-10-29 02:22:26 -05:00)
[feature] add paging to account follows, followers and follow requests endpoints (#2186)

Commit: 7293d6029b
Parent: 4b594516ec
51 changed files with 2281 additions and 641 deletions

@@ -17,10 +17,10 @@
 package paging
 
-// MinID returns an ID boundary with given min ID value,
+// EitherMinID returns an ID boundary with given min ID value,
 // using either the `since_id`,"DESC" name,ordering or
 // `min_id`,"ASC" name,ordering depending on which is set.
-func MinID(minID, sinceID string) Boundary {
+func EitherMinID(minID, sinceID string) Boundary {
 	/*
 
 	   Paging with `since_id` vs `min_id`:

@@ -47,18 +47,28 @@ func MinID(minID, sinceID string) Boundary {
 	*/
 	switch {
 	case minID != "":
-		return Boundary{
-			Name:  "min_id",
-			Value: minID,
-			Order: OrderAscending,
-		}
+		return MinID(minID)
 	default:
 		// default min is `since_id`
-		return Boundary{
-			Name:  "since_id",
-			Value: sinceID,
-			Order: OrderDescending,
-		}
+		return SinceID(sinceID)
 	}
 }
+
+// SinceID ...
+func SinceID(sinceID string) Boundary {
+	return Boundary{
+		Name:  "since_id",
+		Value: sinceID,
+		Order: OrderDescending,
+	}
+}
+
+// MinID ...
+func MinID(minID string) Boundary {
+	return Boundary{
+		Name:  "min_id",
+		Value: minID,
+		Order: OrderAscending,
+	}
+}
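
The split above gives each boundary kind its own constructor while EitherMinID keeps the old selection behaviour. A minimal standalone sketch of that selection logic; the Order constants and Boundary fields here are simplified stand-ins, not the package's real definitions:

package main

import "fmt"

// Simplified stand-ins for the package's Order and Boundary types.
type Order int

const (
	OrderDescending Order = iota + 1
	OrderAscending
)

type Boundary struct {
	Name  string // query parameter name
	Value string // boundary ID value
	Order Order  // implied sort direction
}

// eitherMinID mirrors EitherMinID above: a non-empty min_id wins and
// implies ascending paging; otherwise since_id is used (descending).
func eitherMinID(minID, sinceID string) Boundary {
	if minID != "" {
		return Boundary{Name: "min_id", Value: minID, Order: OrderAscending}
	}
	return Boundary{Name: "since_id", Value: sinceID, Order: OrderDescending}
}

func main() {
	fmt.Printf("%+v\n", eitherMinID("01A", "")) // {Name:min_id Value:01A Order:2}
	fmt.Printf("%+v\n", eitherMinID("", "01B")) // {Name:since_id Value:01B Order:1}
}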

@@ -111,7 +121,7 @@ func (b Boundary) new(value string) Boundary {
 
 // Find finds the boundary's set value in input slice, or returns -1.
 func (b Boundary) Find(in []string) int {
-	if zero(b.Value) {
+	if b.Value == "" {
		return -1
 	}
 	for i := range in {

@@ -121,15 +131,3 @@ func (b Boundary) Find(in []string) int {
 	}
 	return -1
 }
-
-// Query returns this boundary as assembled query key=value pair.
-func (b Boundary) Query() string {
-	switch {
-	case zero(b.Value):
-		return ""
-	case b.Name == "":
-		panic("value without boundary name")
-	default:
-		return b.Name + "=" + b.Value
-	}
-}
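
Find's zero() call becomes a plain empty-string comparison (the generic zero helper is deleted at the end of this commit), and Query() is dropped because ToLink below now assembles query strings through url.Values. A rough sketch of Find's contract; the loop body is elided in the hunk above, so the linear scan here is an assumption:

package main

import "fmt"

// find mirrors Boundary.Find: an unset boundary value matches nothing;
// otherwise return the index of the value in the input slice, or -1.
func find(value string, in []string) int {
	if value == "" {
		return -1
	}
	for i := range in {
		if in[i] == value {
			return i
		}
	}
	return -1
}

func main() {
	ids := []string{"01C", "01B", "01A"} // descending, newest first
	fmt.Println(find("01B", ids))        // 1
	fmt.Println(find("", ids))           // -1: unset boundary
}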

@@ -20,7 +20,6 @@ package paging
 import (
 	"net/url"
 	"strconv"
-	"strings"
 
 	"golang.org/x/exp/slices"
 )

@@ -70,26 +69,10 @@ func (p *Page) GetOrder() Order {
 }
 
 func (p *Page) order() Order {
-	var (
-		// Check if min/max values set.
-		minValue = zero(p.Min.Value)
-		maxValue = zero(p.Max.Value)
-
-		// Check if min/max orders set.
-		minOrder = (p.Min.Order != 0)
-		maxOrder = (p.Max.Order != 0)
-	)
-
 	switch {
-	// Boundaries with a value AND order set
-	// take priority. Min always comes first.
-	case minValue && minOrder:
+	case p.Min.Order != 0:
 		return p.Min.Order
-	case maxValue && maxOrder:
-		return p.Max.Order
-	case minOrder:
-		return p.Min.Order
-	case maxOrder:
+	case p.Max.Order != 0:
 		return p.Max.Order
 	default:
 		return 0
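
order() now reduces to a fixed precedence: an explicit order on the Min boundary wins, then Max, otherwise the zero Order (unset). A standalone sketch with stand-in types showing how the constructors above feed into this:

package main

import "fmt"

type Order int

const (
	OrderDescending Order = iota + 1
	OrderAscending
)

type Boundary struct{ Order Order }

type Page struct{ Min, Max Boundary }

// order mirrors the simplified switch above: Min's explicit
// order takes precedence, then Max's, else zero (unset).
func (p Page) order() Order {
	switch {
	case p.Min.Order != 0:
		return p.Min.Order
	case p.Max.Order != 0:
		return p.Max.Order
	default:
		return 0
	}
}

func main() {
	asc := Page{Min: Boundary{Order: OrderAscending}}   // e.g. Min built via MinID()
	desc := Page{Min: Boundary{Order: OrderDescending}} // e.g. Min built via SinceID()
	fmt.Println(asc.order() == OrderAscending)   // true
	fmt.Println(desc.order() == OrderDescending) // true
}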

@@ -108,31 +91,9 @@ func (p *Page) Page(in []string) []string {
 		return in
 	}
 
-	if o := p.order(); !o.Ascending() {
-		// Default sort is descending,
-		// catching all cases when NOT
-		// ascending (even zero value).
-		//
-		// NOTE: sorted data does not always
-		// occur according to string ineqs
-		// so we unfortunately cannot check.
-
-		if maxIdx := p.Max.Find(in); maxIdx != -1 {
-			// Reslice skipping up to max.
-			in = in[maxIdx+1:]
-		}
-
-		if minIdx := p.Min.Find(in); minIdx != -1 {
-			// Reslice stripping past min.
-			in = in[:minIdx]
-		}
-	} else {
+	if p.order().Ascending() {
 		// Sort type is ascending, input
 		// data is assumed to be ascending.
-		//
-		// NOTE: sorted data does not always
-		// occur according to string ineqs
-		// so we unfortunately cannot check.
 
 		if minIdx := p.Min.Find(in); minIdx != -1 {
 			// Reslice skipping up to min.

@@ -144,6 +105,11 @@ func (p *Page) Page(in []string) []string {
 			in = in[:maxIdx]
 		}
 
+		if p.Limit > 0 && p.Limit < len(in) {
+			// Reslice input to limit.
+			in = in[:p.Limit]
+		}
+
 		if len(in) > 1 {
 			// Clone input before
 			// any modifications.

@@ -153,11 +119,25 @@ func (p *Page) Page(in []string) []string {
 			// ALWAYS be descending.
 			in = Reverse(in)
 		}
-	}
-
-	if p.Limit > 0 && p.Limit < len(in) {
-		// Reslice input to limit.
-		in = in[:p.Limit]
-	}
+	} else {
+		// Default sort is descending,
+		// catching all cases when NOT
+		// ascending (even zero value).
+
+		if maxIdx := p.Max.Find(in); maxIdx != -1 {
+			// Reslice skipping up to max.
+			in = in[maxIdx+1:]
+		}
+
+		if minIdx := p.Min.Find(in); minIdx != -1 {
+			// Reslice stripping past min.
+			in = in[:minIdx]
+		}
+
+		if p.Limit > 0 && p.Limit < len(in) {
+			// Reslice input to limit.
+			in = in[:p.Limit]
+		}
+	}
 
 	return in
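
Restructured this way, each ordering branch does its own min/max reslicing and applies the limit before returning, with ascending results reversed so output is always descending. A self-contained sketch of the descending branch, assuming (as the code comments note) that input arrives already sorted in page order; indexOf stands in for Boundary.Find:

package main

import "fmt"

// pageDescending sketches the descending branch above: skip past the
// max boundary, cut at the min boundary, then apply the limit.
// Input is assumed already sorted descending (newest ID first).
func pageDescending(in []string, maxID, minID string, limit int) []string {
	if i := indexOf(in, maxID); i != -1 {
		in = in[i+1:] // reslice skipping up to max
	}
	if i := indexOf(in, minID); i != -1 {
		in = in[:i] // reslice stripping past min
	}
	if limit > 0 && limit < len(in) {
		in = in[:limit] // reslice input to limit
	}
	return in
}

func indexOf(in []string, v string) int {
	if v == "" {
		return -1
	}
	for i := range in {
		if in[i] == v {
			return i
		}
	}
	return -1
}

func main() {
	ids := []string{"05", "04", "03", "02", "01"}    // descending
	fmt.Println(pageDescending(ids, "05", "01", 10)) // [04 03 02]
	fmt.Println(pageDescending(ids, "", "", 2))      // [05 04]
}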

@@ -165,8 +145,8 @@ func (p *Page) Page(in []string) []string {
 
 // Next creates a new instance for the next returnable page, using
 // given max value. This preserves original limit and max key name.
-func (p *Page) Next(max string) *Page {
-	if p == nil || max == "" {
+func (p *Page) Next(lo, hi string) *Page {
+	if p == nil || lo == "" || hi == "" {
 		// no paging.
 		return nil
 	}

@@ -177,16 +157,27 @@ func (p *Page) Next(max string) *Page {
 	// Set original limit.
 	p2.Limit = p.Limit
 
-	// Create new from old.
-	p2.Max = p.Max.new(max)
+	if p.order().Ascending() {
+		// When ascending, next page
+		// needs to start with min at
+		// the next highest value.
+		p2.Min = p.Min.new(hi)
+		p2.Max = p.Max.new("")
+	} else {
+		// When descending, next page
+		// needs to start with max at
+		// the next lowest value.
+		p2.Min = p.Min.new("")
+		p2.Max = p.Max.new(lo)
+	}
 
 	return p2
 }
 
 // Prev creates a new instance for the prev returnable page, using
 // given min value. This preserves original limit and min key name.
-func (p *Page) Prev(min string) *Page {
-	if p == nil || min == "" {
+func (p *Page) Prev(lo, hi string) *Page {
+	if p == nil || lo == "" || hi == "" {
 		// no paging.
 		return nil
 	}

@@ -197,55 +188,56 @@ func (p *Page) Prev(min string) *Page {
 	// Set original limit.
 	p2.Limit = p.Limit
 
-	// Create new from old.
-	p2.Min = p.Min.new(min)
+	if p.order().Ascending() {
+		// When ascending, prev page
+		// needs to start with max at
+		// the next lowest value.
+		p2.Min = p.Min.new("")
+		p2.Max = p.Max.new(lo)
+	} else {
+		// When descending, next page
+		// needs to start with max at
+		// the next lowest value.
+		p2.Min = p.Min.new(hi)
+		p2.Max = p.Max.new("")
+	}
 
 	return p2
 }
 
 // ToLink builds a URL link for given endpoint information and extra query parameters,
 // appending this Page's minimum / maximum boundaries and available limit (if any).
-func (p *Page) ToLink(proto, host, path string, queryParams []string) string {
+func (p *Page) ToLink(proto, host, path string, queryParams url.Values) string {
 	if p == nil {
 		// no paging.
 		return ""
 	}
 
-	// Check length before
-	// adding boundary params.
-	old := len(queryParams)
+	if queryParams == nil {
+		// Allocate new query parameters.
+		queryParams = make(url.Values)
+	}
 
-	if minParam := p.Min.Query(); minParam != "" {
+	if p.Min.Value != "" {
 		// A page-minimum query parameter is available.
-		queryParams = append(queryParams, minParam)
+		queryParams.Add(p.Min.Name, p.Min.Value)
 	}
 
-	if maxParam := p.Max.Query(); maxParam != "" {
+	if p.Max.Value != "" {
 		// A page-maximum query parameter is available.
-		queryParams = append(queryParams, maxParam)
-	}
-
-	if len(queryParams) == old {
-		// No page boundaries.
-		return ""
+		queryParams.Add(p.Max.Name, p.Max.Value)
 	}
 
 	if p.Limit > 0 {
-		// Build limit key-value query parameter.
-		param := "limit=" + strconv.Itoa(p.Limit)
-
-		// Append `limit=$value` query parameter.
-		queryParams = append(queryParams, param)
+		// A page limit query parameter is available.
+		queryParams.Add("limit", strconv.Itoa(p.Limit))
 	}
 
-	// Join collected params into query str.
-	query := strings.Join(queryParams, "&")
-
 	// Build URL string.
 	return (&url.URL{
 		Scheme:   proto,
 		Host:     host,
 		Path:     path,
-		RawQuery: query,
+		RawQuery: queryParams.Encode(),
 	}).String()
 }
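
Next and Prev used to take a single boundary value; they now take the low and high IDs bounding the returned page and choose which to carry forward based on page order: descending pages advance by moving max down to the lowest returned ID, ascending pages by moving min up to the highest. A simplified, runnable sketch of the Next case; the types here are stand-ins, not the real Page:

package main

import "fmt"

// Stand-in types to illustrate the lo/hi flip.
type Boundary struct{ Name, Value string }

type Page struct {
	Min, Max  Boundary
	Ascending bool
}

// Next mirrors the logic above: ascending pages continue from the
// highest returned ID via min; descending pages from the lowest via max.
func (p Page) Next(lo, hi string) Page {
	p2 := p
	if p.Ascending {
		p2.Min.Value, p2.Max.Value = hi, ""
	} else {
		p2.Min.Value, p2.Max.Value = "", lo
	}
	return p2
}

func main() {
	desc := Page{Min: Boundary{Name: "since_id"}, Max: Boundary{Name: "max_id"}}
	// Suppose this page returned IDs 09..05 (descending): lowest "05", highest "09".
	next := desc.Next("05", "09")
	fmt.Println(next.Max.Name + "=" + next.Max.Value) // max_id=05
}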

@@ -97,7 +97,7 @@ var cases = []Case{
 
 		// Return page and expected IDs.
 		return ids, &paging.Page{
-			Min: paging.MinID(minID, ""),
+			Min: paging.MinID(minID),
 			Max: paging.MaxID(maxID),
 		}, expect
 	}),

@@ -129,7 +129,7 @@ var cases = []Case{
 
 		// Return page and expected IDs.
 		return ids, &paging.Page{
-			Min:   paging.MinID(minID, ""),
+			Min:   paging.MinID(minID),
 			Max:   paging.MaxID(maxID),
 			Limit: limit,
 		}, expect

@@ -156,7 +156,7 @@ var cases = []Case{
 
 		// Return page and expected IDs.
 		return ids, &paging.Page{
-			Min:   paging.MinID(minID, ""),
+			Min:   paging.MinID(minID),
 			Max:   paging.MaxID(maxID),
 			Limit: len(ids) * 2,
 		}, expect

@@ -182,7 +182,7 @@ var cases = []Case{
 
 		// Return page and expected IDs.
 		return ids, &paging.Page{
-			Min: paging.MinID("", sinceID),
+			Min: paging.SinceID(sinceID),
 			Max: paging.MaxID(maxID),
 		}, expect
 	}),

@@ -225,7 +225,7 @@ var cases = []Case{
 
 		// Return page and expected IDs.
 		return ids, &paging.Page{
-			Min: paging.MinID("", sinceID),
+			Min: paging.SinceID(sinceID),
 		}, expect
 	}),
 	CreateCase("minID set", func(ids []string) ([]string, *paging.Page, []string) {

@@ -247,7 +247,7 @@ var cases = []Case{
 
 		// Return page and expected IDs.
 		return ids, &paging.Page{
-			Min: paging.MinID(minID, ""),
+			Min: paging.MinID(minID),
 		}, expect
 	}),
 }
@@ -30,9 +30,9 @@ import (
 // While conversely, a zero default limit will not enforce paging, returning a nil page value.
 func ParseIDPage(c *gin.Context, min, max, _default int) (*Page, gtserror.WithCode) {
 	// Extract request query params.
-	sinceID := c.Query("since_id")
-	minID := c.Query("min_id")
-	maxID := c.Query("max_id")
+	sinceID, haveSince := c.GetQuery("since_id")
+	minID, haveMin := c.GetQuery("min_id")
+	maxID, haveMax := c.GetQuery("max_id")
 
 	// Extract request limit parameter.
 	limit, errWithCode := ParseLimit(c, min, max, _default)

@@ -40,20 +40,38 @@ func ParseIDPage(c *gin.Context, min, max, _default int) (*Page, gtserror.WithCode) {
 		return nil, errWithCode
 	}
 
-	if sinceID == "" &&
-		minID == "" &&
-		maxID == "" &&
-		limit == 0 {
+	switch {
+	case haveMin:
+		// A min_id was supplied, even if the value
+		// itself is empty. This indicates ASC order.
+		return &Page{
+			Min:   MinID(minID),
+			Max:   MaxID(maxID),
+			Limit: limit,
+		}, nil
+
+	case haveMax || haveSince:
+		// A max_id or since_id was supplied, even if the
+		// value itself is empty. This indicates DESC order.
+		return &Page{
+			Min:   SinceID(sinceID),
+			Max:   MaxID(maxID),
+			Limit: limit,
+		}, nil
+
+	case limit == 0:
 		// No ID paging params provided, and no default
 		// limit value which indicates paging not enforced.
 		return nil, nil
-	}
 
-	return &Page{
-		Min:   MinID(minID, sinceID),
-		Max:   MaxID(maxID),
-		Limit: limit,
-	}, nil
+	default:
+		// only limit.
+		return &Page{
+			Min:   SinceID(""),
+			Max:   MaxID(""),
+			Limit: limit,
+		}, nil
+	}
 }
 
 // ParseShortcodeDomainPage parses an emoji shortcode domain Page from a request context, returning BadRequest
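
The c.Query to c.GetQuery switch is what makes the ordering decision above possible: Query cannot distinguish ?min_id= (present but empty) from an absent parameter, while GetQuery also returns a presence flag, so even a bare min_id selects ascending order. The same presence check, sketched with the standard library rather than gin:

package main

import (
	"fmt"
	"net/url"
)

// pickOrder mirrors the ParseIDPage switch: presence of min_id selects
// ascending paging even when its value is empty; max_id or since_id
// selects descending; with neither, order is left to defaults.
func pickOrder(query url.Values) string {
	_, haveMin := query["min_id"]
	_, haveMax := query["max_id"]
	_, haveSince := query["since_id"]
	switch {
	case haveMin:
		return "ASC"
	case haveMax || haveSince:
		return "DESC"
	default:
		return "default"
	}
}

func main() {
	q1, _ := url.ParseQuery("min_id=") // present but empty
	q2, _ := url.ParseQuery("max_id=01ABC")
	fmt.Println(pickOrder(q1), pickOrder(q2)) // ASC DESC
}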

@@ -62,8 +80,8 @@ func ParseIDPage(c *gin.Context, min, max, _default int) (*Page, gtserror.WithCode) {
 // a zero default limit will not enforce paging, returning a nil page value.
 func ParseShortcodeDomainPage(c *gin.Context, min, max, _default int) (*Page, gtserror.WithCode) {
 	// Extract request query parameters.
-	minShortcode := c.Query("min_shortcode_domain")
-	maxShortcode := c.Query("max_shortcode_domain")
+	minShortcode, haveMin := c.GetQuery("min_shortcode_domain")
+	maxShortcode, haveMax := c.GetQuery("max_shortcode_domain")
 
 	// Extract request limit parameter.
 	limit, errWithCode := ParseLimit(c, min, max, _default)

@@ -71,8 +89,8 @@ func ParseShortcodeDomainPage(c *gin.Context, min, max, _default int) (*Page, gtserror.WithCode) {
 		return nil, errWithCode
 	}
 
-	if minShortcode == "" &&
-		maxShortcode == "" &&
+	if !haveMin &&
+		!haveMax &&
 		limit == 0 {
 		// No ID paging params provided, and no default
 		// limit value which indicates paging not enforced.

@@ -89,7 +107,10 @@ func ParseShortcodeDomainPage(c *gin.Context, min, max, _default int) (*Page, gtserror.WithCode) {
 // ParseLimit parses the limit query parameter from a request context, returning BadRequest on error parsing and _default if zero limit given.
 func ParseLimit(c *gin.Context, min, max, _default int) (int, gtserror.WithCode) {
 	// Get limit query param.
-	str := c.Query("limit")
+	str, ok := c.GetQuery("limit")
+	if !ok {
+		return _default, nil
+	}
 
 	// Attempt to parse limit int.
 	i, err := strconv.Atoi(str)
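
Returning early on !ok matters because strconv.Atoi fails on an empty string; an absent limit now short-circuits to _default before parsing. A standalone sketch of the parse path; the clamping to min/max that the signature implies is outside this hunk and omitted here:

package main

import (
	"fmt"
	"strconv"
)

// parseLimit sketches ParseLimit: a missing param yields the default,
// a bad integer is an error, and (per the doc comment above) a zero
// limit falls back to the default as well.
func parseLimit(raw string, present bool, _default int) (int, error) {
	if !present {
		return _default, nil
	}
	i, err := strconv.Atoi(raw)
	if err != nil {
		return 0, fmt.Errorf("bad limit %q: %w", raw, err)
	}
	if i == 0 {
		return _default, nil
	}
	return i, nil
}

func main() {
	fmt.Println(parseLimit("", false, 20))  // 20 <nil>
	fmt.Println(parseLimit("40", true, 20)) // 40 <nil>
}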

@@ -18,6 +18,7 @@
 package paging
 
 import (
+	"net/url"
 	"strings"
 
 	apimodel "github.com/superseriousbusiness/gotosocial/internal/api/model"

@@ -35,18 +36,13 @@ type ResponseParams struct {
 	Path  string     // path to use for next/prev queries in the link header
 	Next  *Page      // page details for the next page
 	Prev  *Page      // page details for the previous page
-	Query []string   // any extra query parameters to provide in the link header, should be in the format 'example=value'
+	Query url.Values // any extra query parameters to provide in the link header, should be in the format 'example=value'
 }
 
 // PackageResponse is a convenience function for returning
 // a bunch of pageable items (notifications, statuses, etc), as well
 // as a Link header to inform callers of where to find next/prev items.
 func PackageResponse(params ResponseParams) *apimodel.PageableResponse {
 	if len(params.Items) == 0 {
 		// No items to page through.
 		return EmptyResponse()
 	}
 
 	var (
 		// Extract paging params.
 		nextPg = params.Next
@@ -42,9 +42,9 @@ func (suite *PagingSuite) TestPagingStandard() {
 	resp := paging.PackageResponse(params)
 
 	suite.Equal(make([]interface{}, 10, 10), resp.Items)
-	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN&limit=10>; rel="next", <https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R&limit=10>; rel="prev"`, resp.LinkHeader)
-	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN&limit=10`, resp.NextLink)
-	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R&limit=10`, resp.PrevLink)
+	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?limit=10&max_id=01H11KA1DM2VH3747YDE7FV5HN>; rel="next", <https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?limit=10&min_id=01H11KBBVRRDYYC5KEPME1NP5R>; rel="prev"`, resp.LinkHeader)
+	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?limit=10&max_id=01H11KA1DM2VH3747YDE7FV5HN`, resp.NextLink)
+	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?limit=10&min_id=01H11KBBVRRDYYC5KEPME1NP5R`, resp.PrevLink)
 }
 
 func (suite *PagingSuite) TestPagingNoLimit() {

@@ -77,9 +77,9 @@ func (suite *PagingSuite) TestPagingNoNextID() {
 	resp := paging.PackageResponse(params)
 
 	suite.Equal(make([]interface{}, 10, 10), resp.Items)
-	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R&limit=10>; rel="prev"`, resp.LinkHeader)
+	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?limit=10&min_id=01H11KBBVRRDYYC5KEPME1NP5R>; rel="prev"`, resp.LinkHeader)
 	suite.Equal(``, resp.NextLink)
-	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R&limit=10`, resp.PrevLink)
+	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?limit=10&min_id=01H11KBBVRRDYYC5KEPME1NP5R`, resp.PrevLink)
 }
 
 func (suite *PagingSuite) TestPagingNoPrevID() {

@@ -94,27 +94,11 @@ func (suite *PagingSuite) TestPagingNoPrevID() {
 	resp := paging.PackageResponse(params)
 
 	suite.Equal(make([]interface{}, 10, 10), resp.Items)
-	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN&limit=10>; rel="next"`, resp.LinkHeader)
-	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN&limit=10`, resp.NextLink)
+	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?limit=10&max_id=01H11KA1DM2VH3747YDE7FV5HN>; rel="next"`, resp.LinkHeader)
+	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?limit=10&max_id=01H11KA1DM2VH3747YDE7FV5HN`, resp.NextLink)
 	suite.Equal(``, resp.PrevLink)
 }
 
-func (suite *PagingSuite) TestPagingNoItems() {
-	config.SetHost("example.org")
-
-	params := paging.ResponseParams{
-		Next: nextPage("01H11KA1DM2VH3747YDE7FV5HN", 10),
-		Prev: prevPage("01H11KBBVRRDYYC5KEPME1NP5R", 10),
-	}
-
-	resp := paging.PackageResponse(params)
-
-	suite.Empty(resp.Items)
-	suite.Empty(resp.LinkHeader)
-	suite.Empty(resp.NextLink)
-	suite.Empty(resp.PrevLink)
-}
-
 func TestPagingSuite(t *testing.T) {
 	suite.Run(t, &PagingSuite{})
 }

@@ -128,7 +112,7 @@ func nextPage(id string, limit int) *paging.Page {
 
 func prevPage(id string, limit int) *paging.Page {
 	return &paging.Page{
-		Min:   paging.MinID(id, ""),
+		Min:   paging.MinID(id),
 		Limit: limit,
 	}
 }
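
The expected URLs in these tests change only in parameter order: url.Values.Encode emits keys sorted alphabetically, so limit now precedes max_id and min_id, where the old strings.Join preserved append order. For example:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// url.Values.Encode sorts by key, so "limit" sorts before "max_id".
	q := make(url.Values)
	q.Add("max_id", "01H11KA1DM2VH3747YDE7FV5HN")
	q.Add("limit", "10")
	fmt.Println(q.Encode()) // limit=10&max_id=01H11KA1DM2VH3747YDE7FV5HN
}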

@@ -41,9 +41,3 @@ func Reverse(in []string) []string {
 
 	return in
 }
-
-// zero is a shorthand to check a generic value is its zero value.
-func zero[T comparable](t T) bool {
-	var z T
-	return t == z
-}