Mirror of https://github.com/superseriousbusiness/gotosocial.git

[chore] much improved paging package (#2182)

Parent: 14ef098099
Commit: b093947d84

15 changed files with 1154 additions and 445 deletions
		|  | @ -103,8 +103,12 @@ func (m *Module) BlocksGETHandler(c *gin.Context) { | |||
| 		return | ||||
| 	} | ||||
| 
 | ||||
| 	limit, errWithCode := apiutil.ParseLimit(c.Query(LimitKey), 20, 100, 2) | ||||
| 	if err != nil { | ||||
| 	page, errWithCode := paging.ParseIDPage(c, | ||||
| 		1,   // min limit | ||||
| 		100, // max limit | ||||
| 		20,  // default limit | ||||
| 	) | ||||
| 	if errWithCode != nil { | ||||
| 		apiutil.ErrorHandler(c, errWithCode, m.processor.InstanceGetV1) | ||||
| 		return | ||||
| 	} | ||||
|  | @ -112,11 +116,7 @@ func (m *Module) BlocksGETHandler(c *gin.Context) { | |||
| 	resp, errWithCode := m.processor.BlocksGet( | ||||
| 		c.Request.Context(), | ||||
| 		authed.Account, | ||||
| 		paging.Pager{ | ||||
| 			SinceID: c.Query(SinceIDKey), | ||||
| 			MaxID:   c.Query(MaxIDKey), | ||||
| 			Limit:   limit, | ||||
| 		}, | ||||
| 		page, | ||||
| 	) | ||||
| 	if errWithCode != nil { | ||||
| 		apiutil.ErrorHandler(c, errWithCode, m.processor.InstanceGetV1) | ||||
|  |  | |||
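The hunk above shows the pattern this commit applies across the API layer: a single paging.ParseIDPage call replaces the separate apiutil.ParseLimit call and the hand-built paging.Pager. Below is a minimal sketch of that flow in an isolated gin handler; the route, handler name and hard-coded IDs are illustrative only, and it assumes compilation inside the GoToSocial module, since the paging package is internal.

    package main

    import (
        "net/http"

        "github.com/gin-gonic/gin"
        "github.com/superseriousbusiness/gotosocial/internal/paging"
    )

    // blocksHandler sketches the new parse-then-page flow.
    func blocksHandler(c *gin.Context) {
        // One call reads since_id, min_id, max_id and limit,
        // bounding the limit to 1..100 with a default of 20.
        page, errWithCode := paging.ParseIDPage(c,
            1,   // min limit
            100, // max limit
            20,  // default limit
        )
        if errWithCode != nil {
            // The real handlers hand this to apiutil.ErrorHandler;
            // a bare 400 keeps the sketch self-contained.
            c.AbortWithStatus(http.StatusBadRequest)
            return
        }

        // Stand-in IDs, already sorted descending as the API expects.
        // All Page methods are nil-safe, so "no paging" needs no special case.
        ids := page.Page([]string{"03", "02", "01"})
        c.JSON(http.StatusOK, ids)
    }

    func main() {
        r := gin.Default()
        r.GET("/api/v1/blocks", blocksHandler)
        _ = r.Run(":8080")
    }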
							
								
								
									
internal/cache/slice.go (25 changes, vendored)
							|  | @ -49,28 +49,3 @@ func (c *SliceCache[T]) Load(key string, load func() ([]T, error)) ([]T, error) | |||
| 	// Return data clone for safety. | ||||
| 	return slices.Clone(data), nil | ||||
| } | ||||
| 
 | ||||
| // LoadRange is functionally the same as .Load(), but will pass the result through provided reslice function before returning a cloned result. | ||||
| func (c *SliceCache[T]) LoadRange(key string, load func() ([]T, error), reslice func([]T) []T) ([]T, error) { | ||||
| 	// Look for follow IDs list in cache under this key. | ||||
| 	data, ok := c.Get(key) | ||||
| 
 | ||||
| 	if !ok { | ||||
| 		var err error | ||||
| 
 | ||||
| 		// Not cached, load! | ||||
| 		data, err = load() | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 
 | ||||
| 		// Store the data. | ||||
| 		c.Set(key, data) | ||||
| 	} | ||||
| 
 | ||||
| 	// Reslice to range. | ||||
| 	slice := reslice(data) | ||||
| 
 | ||||
| 	// Return range clone for safety. | ||||
| 	return slices.Clone(slice), nil | ||||
| } | ||||
|  |  | |||
|  | @ -150,9 +150,9 @@ func (r *relationshipDB) GetAccountFollowRequesting(ctx context.Context, account | |||
| 	return r.GetFollowRequestsByIDs(ctx, followReqIDs) | ||||
| } | ||||
| 
 | ||||
| func (r *relationshipDB) GetAccountBlocks(ctx context.Context, accountID string, page *paging.Pager) ([]*gtsmodel.Block, error) { | ||||
| func (r *relationshipDB) GetAccountBlocks(ctx context.Context, accountID string, page *paging.Page) ([]*gtsmodel.Block, error) { | ||||
| 	// Load block IDs from cache with database loader callback. | ||||
| 	blockIDs, err := r.state.Caches.GTS.BlockIDs().LoadRange(accountID, func() ([]string, error) { | ||||
| 	blockIDs, err := r.state.Caches.GTS.BlockIDs().Load(accountID, func() ([]string, error) { | ||||
| 		var blockIDs []string | ||||
| 
 | ||||
| 		// Block IDs not in cache, perform DB query! | ||||
|  | @ -162,11 +162,22 @@ func (r *relationshipDB) GetAccountBlocks(ctx context.Context, accountID string, | |||
| 		} | ||||
| 
 | ||||
| 		return blockIDs, nil | ||||
| 	}, page.PageDesc) | ||||
| 	}) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 
 | ||||
| 	// Our cached / selected block IDs are | ||||
| 	// ALWAYS stored in descending order. | ||||
| 	// Depending on the paging requested | ||||
| 	// this may be an unexpected order. | ||||
| 	if !page.GetOrder().Ascending() { | ||||
| 		blockIDs = paging.Reverse(blockIDs) | ||||
| 	} | ||||
| 
 | ||||
| 	// Page the resulting block IDs. | ||||
| 	blockIDs = page.Page(blockIDs) | ||||
| 
 | ||||
| 	// Convert these IDs to full block objects. | ||||
| 	return r.GetBlocksByIDs(ctx, blockIDs) | ||||
| } | ||||
|  |  | |||
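With SliceCache.LoadRange removed, the division of labour above is: the cache's plain Load returns the complete ID list, and the caller pages it afterwards with page.Page. A small sketch of that caller-side step with made-up IDs already sorted descending, as the cache comment above promises (no cache or database involved):

    package main

    import (
        "fmt"

        "github.com/superseriousbusiness/gotosocial/internal/paging"
    )

    func main() {
        // Stand-in for the full ID list a cache Load would return,
        // sorted descending (newest first).
        blockIDs := []string{"05", "04", "03", "02", "01"}

        // Roughly a ?max_id=04&limit=2 request.
        page := &paging.Page{
            Max:   paging.MaxID("04"),
            Limit: 2,
        }

        // Paging now happens at the call site, not inside the cache:
        // drop everything up to and including max_id, then limit.
        blockIDs = page.Page(blockIDs)

        fmt.Println(blockIDs) // [03 02]
    }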
|  | @ -174,7 +174,7 @@ type Relationship interface { | |||
| 	CountAccountFollowRequesting(ctx context.Context, accountID string) (int, error) | ||||
| 
 | ||||
| 	// GetAccountBlocks returns all blocks originating from the given account, with given optional paging parameters. | ||||
| 	GetAccountBlocks(ctx context.Context, accountID string, paging *paging.Pager) ([]*gtsmodel.Block, error) | ||||
| 	GetAccountBlocks(ctx context.Context, accountID string, paging *paging.Page) ([]*gtsmodel.Block, error) | ||||
| 
 | ||||
| 	// GetNote gets a private note from a source account on a target account, if it exists. | ||||
| 	GetNote(ctx context.Context, sourceAccountID string, targetAccountID string) (*gtsmodel.AccountNote, error) | ||||
|  |  | |||
							
								
								
									
internal/paging/boundary.go (135 changes, Normal file)
							|  | @ -0,0 +1,135 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging | ||||
| 
 | ||||
| // MinID returns an ID boundary with given min ID value, | ||||
| // using either the `since_id`,"DESC" name,ordering or | ||||
| // `min_id`,"ASC" name,ordering depending on which is set. | ||||
| func MinID(minID, sinceID string) Boundary { | ||||
| 	/* | ||||
| 
 | ||||
| 	           Paging with `since_id` vs `min_id`: | ||||
| 
 | ||||
| 	                limit = 4       limit = 4 | ||||
| 	               +----------+    +----------+ | ||||
| 	     max_id--> |xxxxxxxxxx|    |          | <-- max_id | ||||
| 	               +----------+    +----------+ | ||||
| 	               |xxxxxxxxxx|    |          | | ||||
| 	               +----------+    +----------+ | ||||
| 	               |xxxxxxxxxx|    |          | | ||||
| 	               +----------+    +----------+ | ||||
| 	               |xxxxxxxxxx|    |xxxxxxxxxx| | ||||
| 	               +----------+    +----------+ | ||||
| 	               |          |    |xxxxxxxxxx| | ||||
| 	               +----------+    +----------+ | ||||
| 	               |          |    |xxxxxxxxxx| | ||||
| 	               +----------+    +----------+ | ||||
| 	   since_id--> |          |    |xxxxxxxxxx| <-- min_id | ||||
| 	               +----------+    +----------+ | ||||
| 	               |          |    |          | | ||||
| 	               +----------+    +----------+ | ||||
| 
 | ||||
| 	*/ | ||||
| 	switch { | ||||
| 	case minID != "": | ||||
| 		return Boundary{ | ||||
| 			Name:  "min_id", | ||||
| 			Value: minID, | ||||
| 			Order: OrderAscending, | ||||
| 		} | ||||
| 	default: | ||||
| 		// default min is `since_id` | ||||
| 		return Boundary{ | ||||
| 			Name:  "since_id", | ||||
| 			Value: sinceID, | ||||
| 			Order: OrderDescending, | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // MaxID returns an ID boundary with given max | ||||
| // ID value, and the "max_id" query key set. | ||||
| func MaxID(maxID string) Boundary { | ||||
| 	return Boundary{ | ||||
| 		Name:  "max_id", | ||||
| 		Value: maxID, | ||||
| 		Order: OrderDescending, | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // MinShortcodeDomain returns a boundary with the given minimum emoji | ||||
| // shortcode@domain, and the "min_shortcode_domain" query key set. | ||||
| func MinShortcodeDomain(min string) Boundary { | ||||
| 	return Boundary{ | ||||
| 		Name:  "min_shortcode_domain", | ||||
| 		Value: min, | ||||
| 		Order: OrderAscending, | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // MaxShortcodeDomain returns a boundary with the given maximum emoji | ||||
| // shortcode@domain, and the "max_shortcode_domain" query key set. | ||||
| func MaxShortcodeDomain(max string) Boundary { | ||||
| 	return Boundary{ | ||||
| 		Name:  "max_shortcode_domain", | ||||
| 		Value: max, | ||||
| 		Order: OrderDescending, | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // Boundary represents the upper or lower limit in a page slice. | ||||
| type Boundary struct { | ||||
| 	Name  string // i.e. query key | ||||
| 	Value string | ||||
| 	Order Order // NOTE: see Order type for explanation | ||||
| } | ||||
| 
 | ||||
| // new creates a new Boundary with the same ordering and name | ||||
| // as the original (receiving), but with the new provided value. | ||||
| func (b Boundary) new(value string) Boundary { | ||||
| 	return Boundary{ | ||||
| 		Name:  b.Name, | ||||
| 		Value: value, | ||||
| 		Order: b.Order, | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // Find finds the boundary's set value in input slice, or returns -1. | ||||
| func (b Boundary) Find(in []string) int { | ||||
| 	if zero(b.Value) { | ||||
| 		return -1 | ||||
| 	} | ||||
| 	for i := range in { | ||||
| 		if in[i] == b.Value { | ||||
| 			return i | ||||
| 		} | ||||
| 	} | ||||
| 	return -1 | ||||
| } | ||||
| 
 | ||||
| // Query returns this boundary as assembled query key=value pair. | ||||
| func (b Boundary) Query() string { | ||||
| 	switch { | ||||
| 	case zero(b.Value): | ||||
| 		return "" | ||||
| 	case b.Name == "": | ||||
| 		panic("value without boundary name") | ||||
| 	default: | ||||
| 		return b.Name + "=" + b.Value | ||||
| 	} | ||||
| } | ||||
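A Boundary bundles the query-parameter name, its value and the ordering that parameter implies, which is what lets the rest of the package build query strings without caring which parameter the client actually sent. A short sketch of the constructors above, with made-up ID values:

    package main

    import (
        "fmt"

        "github.com/superseriousbusiness/gotosocial/internal/paging"
    )

    func main() {
        // min_id takes precedence in MinID; with only since_id given
        // we get the `since_id` boundary and its descending order.
        lower := paging.MinID("", "01AAAAAA")
        upper := paging.MaxID("01ZZZZZZ")

        fmt.Println(lower.Query()) // since_id=01AAAAAA
        fmt.Println(upper.Query()) // max_id=01ZZZZZZ

        // An unset boundary contributes no query fragment at all.
        fmt.Println(paging.Boundary{}.Query() == "") // true

        // Find locates the boundary value in an ID slice, or returns -1.
        fmt.Println(upper.Find([]string{"01ZZZZZZ", "01AAAAAA"})) // 0
    }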
							
								
								
									
internal/paging/order.go (55 changes, Normal file)
							|  | @ -0,0 +1,55 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging | ||||
| 
 | ||||
| // Order represents the order an input | ||||
| // page should be sorted and paged in. | ||||
| // | ||||
| // NOTE: this does not affect the order of returned | ||||
| // API results, which must always be in descending | ||||
| // order. This behaviour is confusing, but we adopt | ||||
| // it to stay in line with Mastodon API expectations. | ||||
| type Order int | ||||
| 
 | ||||
| const ( | ||||
| 	_default Order = iota | ||||
| 	OrderDescending | ||||
| 	OrderAscending | ||||
| ) | ||||
| 
 | ||||
| // Ascending returns whether this Order is ascending. | ||||
| func (i Order) Ascending() bool { | ||||
| 	return i == OrderAscending | ||||
| } | ||||
| 
 | ||||
| // Descending returns whether this Order is descending. | ||||
| func (i Order) Descending() bool { | ||||
| 	return i == OrderDescending | ||||
| } | ||||
| 
 | ||||
| // String returns a string representation of Order. | ||||
| func (i Order) String() string { | ||||
| 	switch i { | ||||
| 	case OrderDescending: | ||||
| 		return "Descending" | ||||
| 	case OrderAscending: | ||||
| 		return "Ascending" | ||||
| 	default: | ||||
| 		return "not-specified" | ||||
| 	} | ||||
| } | ||||
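Order is deliberately three-valued: the zero value means "not specified", which Page treats the same as descending. A tiny sketch of how a page's order falls out of its boundaries:

    package main

    import (
        "fmt"

        "github.com/superseriousbusiness/gotosocial/internal/paging"
    )

    func main() {
        // Zero value: neither ascending nor descending.
        var unset paging.Order
        fmt.Println(unset.Ascending(), unset.Descending(), unset) // false false not-specified

        // The boundary constructors carry the order with them,
        // so the page order follows from which params were set.
        minPage := &paging.Page{Min: paging.MinID("01EXAMPLE", "")}
        maxPage := &paging.Page{Max: paging.MaxID("01EXAMPLE")}
        fmt.Println(minPage.GetOrder()) // Ascending
        fmt.Println(maxPage.GetOrder()) // Descending
    }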
							
								
								
									
internal/paging/page.go (251 changes, Normal file)
							|  | @ -0,0 +1,251 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging | ||||
| 
 | ||||
| import ( | ||||
| 	"net/url" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 
 | ||||
| 	"golang.org/x/exp/slices" | ||||
| ) | ||||
| 
 | ||||
| type Page struct { | ||||
| 	// Min is the Page's lower limit value. | ||||
| 	Min Boundary | ||||
| 
 | ||||
| 	// Max is this Page's upper limit value. | ||||
| 	Max Boundary | ||||
| 
 | ||||
| 	// Limit will limit the returned | ||||
| 	// page of items to at most 'limit'. | ||||
| 	Limit int | ||||
| } | ||||
| 
 | ||||
| // GetMin is a small helper function to return minimum boundary value (checking for nil page). | ||||
| func (p *Page) GetMin() string { | ||||
| 	if p == nil { | ||||
| 		return "" | ||||
| 	} | ||||
| 	return p.Min.Value | ||||
| } | ||||
| 
 | ||||
| // GetMax is a small helper function to return maximum boundary value (checking for nil page). | ||||
| func (p *Page) GetMax() string { | ||||
| 	if p == nil { | ||||
| 		return "" | ||||
| 	} | ||||
| 	return p.Max.Value | ||||
| } | ||||
| 
 | ||||
| // GetLimit is a small helper function to return limit (checking for nil page and unusable limit). | ||||
| func (p *Page) GetLimit() int { | ||||
| 	if p == nil || p.Limit < 0 { | ||||
| 		return 0 | ||||
| 	} | ||||
| 	return p.Limit | ||||
| } | ||||
| 
 | ||||
| // GetOrder is a small helper function to return page sort ordering (checking for nil page). | ||||
| func (p *Page) GetOrder() Order { | ||||
| 	if p == nil { | ||||
| 		return 0 | ||||
| 	} | ||||
| 	return p.order() | ||||
| } | ||||
| 
 | ||||
| func (p *Page) order() Order { | ||||
| 	var ( | ||||
| 		// Check if min/max values set. | ||||
| 		minValue = zero(p.Min.Value) | ||||
| 		maxValue = zero(p.Max.Value) | ||||
| 
 | ||||
| 		// Check if min/max orders set. | ||||
| 		minOrder = (p.Min.Order != 0) | ||||
| 		maxOrder = (p.Max.Order != 0) | ||||
| 	) | ||||
| 
 | ||||
| 	switch { | ||||
| 	// Boundaries with a value AND order set | ||||
| 	// take priority. Min always comes first. | ||||
| 	case minValue && minOrder: | ||||
| 		return p.Min.Order | ||||
| 	case maxValue && maxOrder: | ||||
| 		return p.Max.Order | ||||
| 	case minOrder: | ||||
| 		return p.Min.Order | ||||
| 	case maxOrder: | ||||
| 		return p.Max.Order | ||||
| 	default: | ||||
| 		return 0 | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // Page will page the given slice of input according | ||||
| // to the receiving Page's minimum, maximum and limit. | ||||
| // NOTE: input slice MUST be sorted according to the order it is | ||||
| // expected to be paged in, i.e. it is currently sorted | ||||
| // according to Page.Order(). Sorted data isn't always according | ||||
| // to string inequalities so this CANNOT be checked here. | ||||
| func (p *Page) Page(in []string) []string { | ||||
| 	if p == nil { | ||||
| 		// no paging. | ||||
| 		return in | ||||
| 	} | ||||
| 
 | ||||
| 	if o := p.order(); !o.Ascending() { | ||||
| 		// Default sort is descending, | ||||
| 		// catching all cases when NOT | ||||
| 		// ascending (even zero value). | ||||
| 		// | ||||
| 		// NOTE: sorted data does not always | ||||
| 		// occur according to string ineqs | ||||
| 		// so we unfortunately cannot check. | ||||
| 
 | ||||
| 		if maxIdx := p.Max.Find(in); maxIdx != -1 { | ||||
| 			// Reslice skipping up to max. | ||||
| 			in = in[maxIdx+1:] | ||||
| 		} | ||||
| 
 | ||||
| 		if minIdx := p.Min.Find(in); minIdx != -1 { | ||||
| 			// Reslice stripping past min. | ||||
| 			in = in[:minIdx] | ||||
| 		} | ||||
| 	} else { | ||||
| 		// Sort type is ascending, input | ||||
| 		// data is assumed to be ascending. | ||||
| 		// | ||||
| 		// NOTE: sorted data does not always | ||||
| 		// occur according to string ineqs | ||||
| 		// so we unfortunately cannot check. | ||||
| 
 | ||||
| 		if minIdx := p.Min.Find(in); minIdx != -1 { | ||||
| 			// Reslice skipping up to min. | ||||
| 			in = in[minIdx+1:] | ||||
| 		} | ||||
| 
 | ||||
| 		if maxIdx := p.Max.Find(in); maxIdx != -1 { | ||||
| 			// Reslice stripping past max. | ||||
| 			in = in[:maxIdx] | ||||
| 		} | ||||
| 
 | ||||
| 		if len(in) > 1 { | ||||
| 			// Clone input before | ||||
| 			// any modifications. | ||||
| 			in = slices.Clone(in) | ||||
| 
 | ||||
| 			// Output slice must | ||||
| 			// ALWAYS be descending. | ||||
| 			in = Reverse(in) | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if p.Limit > 0 && p.Limit < len(in) { | ||||
| 		// Reslice input to limit. | ||||
| 		in = in[:p.Limit] | ||||
| 	} | ||||
| 
 | ||||
| 	return in | ||||
| } | ||||
| 
 | ||||
| // Next creates a new instance for the next returnable page, using | ||||
| // given max value. This preserves original limit and max key name. | ||||
| func (p *Page) Next(max string) *Page { | ||||
| 	if p == nil || max == "" { | ||||
| 		// no paging. | ||||
| 		return nil | ||||
| 	} | ||||
| 
 | ||||
| 	// Create new page. | ||||
| 	p2 := new(Page) | ||||
| 
 | ||||
| 	// Set original limit. | ||||
| 	p2.Limit = p.Limit | ||||
| 
 | ||||
| 	// Create new from old. | ||||
| 	p2.Max = p.Max.new(max) | ||||
| 
 | ||||
| 	return p2 | ||||
| } | ||||
| 
 | ||||
| // Prev creates a new instance for the prev returnable page, using | ||||
| // given min value. This preserves original limit and min key name. | ||||
| func (p *Page) Prev(min string) *Page { | ||||
| 	if p == nil || min == "" { | ||||
| 		// no paging. | ||||
| 		return nil | ||||
| 	} | ||||
| 
 | ||||
| 	// Create new page. | ||||
| 	p2 := new(Page) | ||||
| 
 | ||||
| 	// Set original limit. | ||||
| 	p2.Limit = p.Limit | ||||
| 
 | ||||
| 	// Create new from old. | ||||
| 	p2.Min = p.Min.new(min) | ||||
| 
 | ||||
| 	return p2 | ||||
| } | ||||
| 
 | ||||
| // ToLink builds a URL link for given endpoint information and extra query parameters, | ||||
| // appending this Page's minimum / maximum boundaries and available limit (if any). | ||||
| func (p *Page) ToLink(proto, host, path string, queryParams []string) string { | ||||
| 	if p == nil { | ||||
| 		// no paging. | ||||
| 		return "" | ||||
| 	} | ||||
| 
 | ||||
| 	// Check length before | ||||
| 	// adding boundary params. | ||||
| 	old := len(queryParams) | ||||
| 
 | ||||
| 	if minParam := p.Min.Query(); minParam != "" { | ||||
| 		// A page-minimum query parameter is available. | ||||
| 		queryParams = append(queryParams, minParam) | ||||
| 	} | ||||
| 
 | ||||
| 	if maxParam := p.Max.Query(); maxParam != "" { | ||||
| 		// A page-maximum query parameter is available. | ||||
| 		queryParams = append(queryParams, maxParam) | ||||
| 	} | ||||
| 
 | ||||
| 	if len(queryParams) == old { | ||||
| 		// No page boundaries. | ||||
| 		return "" | ||||
| 	} | ||||
| 
 | ||||
| 	if p.Limit > 0 { | ||||
| 		// Build limit key-value query parameter. | ||||
| 		param := "limit=" + strconv.Itoa(p.Limit) | ||||
| 
 | ||||
| 		// Append `limit=$value` query parameter. | ||||
| 		queryParams = append(queryParams, param) | ||||
| 	} | ||||
| 
 | ||||
| 	// Join collected params into query str. | ||||
| 	query := strings.Join(queryParams, "&") | ||||
| 
 | ||||
| 	// Build URL string. | ||||
| 	return (&url.URL{ | ||||
| 		Scheme:   proto, | ||||
| 		Host:     host, | ||||
| 		Path:     path, | ||||
| 		RawQuery: query, | ||||
| 	}).String() | ||||
| } | ||||
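Putting the pieces together: Page.Page trims a sorted ID slice to the requested window, and Next/Prev plus ToLink turn the oldest and newest returned IDs into the URLs a handler advertises for further paging. A sketch with made-up IDs and host details, roughly the page ParseIDPage would build for ?max_id=06&limit=3:

    package main

    import (
        "fmt"

        "github.com/superseriousbusiness/gotosocial/internal/paging"
    )

    func main() {
        // IDs as a processor would hold them: sorted descending, newest first.
        ids := []string{"06", "05", "04", "03", "02", "01"}

        page := &paging.Page{
            Min:   paging.MinID("", ""), // names the lower bound; no value yet
            Max:   paging.MaxID("06"),
            Limit: 3,
        }

        // Drop everything up to and including max_id, then apply the limit.
        paged := page.Page(ids)
        fmt.Println(paged) // [05 04 03]

        // Derive the next/prev pages from the oldest and newest returned IDs,
        // then render them as URLs for the Link header.
        next := page.Next(paged[len(paged)-1])
        prev := page.Prev(paged[0])

        fmt.Println(next.ToLink("https", "example.org", "/api/v1/blocks", nil))
        // https://example.org/api/v1/blocks?max_id=03&limit=3

        fmt.Println(prev.ToLink("https", "example.org", "/api/v1/blocks", nil))
        // https://example.org/api/v1/blocks?since_id=05&limit=3
    }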
							
								
								
									
internal/paging/page_test.go (298 changes, Normal file)
							|  | @ -0,0 +1,298 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging_test | ||||
| 
 | ||||
| import ( | ||||
| 	"math/rand" | ||||
| 	"testing" | ||||
| 	"time" | ||||
| 
 | ||||
| 	"github.com/oklog/ulid" | ||||
| 	"github.com/superseriousbusiness/gotosocial/internal/paging" | ||||
| 	"golang.org/x/exp/slices" | ||||
| ) | ||||
| 
 | ||||
| // random reader according to current-time source seed. | ||||
| var randRd = rand.New(rand.NewSource(time.Now().Unix())) | ||||
| 
 | ||||
| type Case struct { | ||||
| 	// Name is the test case name. | ||||
| 	Name string | ||||
| 
 | ||||
| 	// Page to use for test. | ||||
| 	Page *paging.Page | ||||
| 
 | ||||
| 	// Input contains test case input ID slice. | ||||
| 	Input []string | ||||
| 
 | ||||
| 	// Expect contains expected test case output. | ||||
| 	Expect []string | ||||
| } | ||||
| 
 | ||||
| // CreateCase creates a new test case with random input for function defining test page parameters and expected output. | ||||
| func CreateCase(name string, getParams func([]string) (input []string, page *paging.Page, expect []string)) Case { | ||||
| 	i := randRd.Intn(100) | ||||
| 	in := generateSlice(i) | ||||
| 	input, page, expect := getParams(in) | ||||
| 	return Case{ | ||||
| 		Name:   name, | ||||
| 		Page:   page, | ||||
| 		Input:  input, | ||||
| 		Expect: expect, | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| func TestPage(t *testing.T) { | ||||
| 	for _, c := range cases { | ||||
| 		t.Run(c.Name, func(t *testing.T) { | ||||
| 			// Page the input slice. | ||||
| 			out := c.Page.Page(c.Input) | ||||
| 
 | ||||
| 			// Log the results for case of error returns. | ||||
| 			t.Logf("\ninput=%v\noutput=%v\nexpected=%v", c.Input, out, c.Expect) | ||||
| 
 | ||||
| 			// Check paged output is as expected. | ||||
| 			if !slices.Equal(out, c.Expect) { | ||||
| 				t.Error("unexpected paged output") | ||||
| 			} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| var cases = []Case{ | ||||
| 	CreateCase("minID and maxID set", func(ids []string) ([]string, *paging.Page, []string) { | ||||
| 		// Ensure input slice sorted ascending for min_id | ||||
| 		slices.SortFunc(ids, func(a, b string) bool { | ||||
| 			return a > b // i.e. largest at lowest idx | ||||
| 		}) | ||||
| 
 | ||||
| 		// Select random indices in slice. | ||||
| 		minIdx := randRd.Intn(len(ids)) | ||||
| 		maxIdx := randRd.Intn(len(ids)) | ||||
| 
 | ||||
| 		// Select the boundaries. | ||||
| 		minID := ids[minIdx] | ||||
| 		maxID := ids[maxIdx] | ||||
| 
 | ||||
| 		// Create expected output. | ||||
| 		expect := slices.Clone(ids) | ||||
| 		expect = cutLower(expect, minID) | ||||
| 		expect = cutUpper(expect, maxID) | ||||
| 		expect = paging.Reverse(expect) | ||||
| 
 | ||||
| 		// Return page and expected IDs. | ||||
| 		return ids, &paging.Page{ | ||||
| 			Min: paging.MinID(minID, ""), | ||||
| 			Max: paging.MaxID(maxID), | ||||
| 		}, expect | ||||
| 	}), | ||||
| 	CreateCase("minID, maxID and limit set", func(ids []string) ([]string, *paging.Page, []string) { | ||||
| 		// Ensure input slice sorted ascending for min_id | ||||
| 		slices.SortFunc(ids, func(a, b string) bool { | ||||
| 			return a > b // i.e. largest at lowest idx | ||||
| 		}) | ||||
| 
 | ||||
| 		// Select random parameters in slice. | ||||
| 		minIdx := randRd.Intn(len(ids)) | ||||
| 		maxIdx := randRd.Intn(len(ids)) | ||||
| 		limit := randRd.Intn(len(ids)) | ||||
| 
 | ||||
| 		// Select the boundaries. | ||||
| 		minID := ids[minIdx] | ||||
| 		maxID := ids[maxIdx] | ||||
| 
 | ||||
| 		// Create expected output. | ||||
| 		expect := slices.Clone(ids) | ||||
| 		expect = cutLower(expect, minID) | ||||
| 		expect = cutUpper(expect, maxID) | ||||
| 		expect = paging.Reverse(expect) | ||||
| 
 | ||||
| 		// Now limit the slice. | ||||
| 		if limit < len(expect) { | ||||
| 			expect = expect[:limit] | ||||
| 		} | ||||
| 
 | ||||
| 		// Return page and expected IDs. | ||||
| 		return ids, &paging.Page{ | ||||
| 			Min:   paging.MinID(minID, ""), | ||||
| 			Max:   paging.MaxID(maxID), | ||||
| 			Limit: limit, | ||||
| 		}, expect | ||||
| 	}), | ||||
| 	CreateCase("minID, maxID and too-large limit set", func(ids []string) ([]string, *paging.Page, []string) { | ||||
| 		// Ensure input slice sorted ascending for min_id | ||||
| 		slices.SortFunc(ids, func(a, b string) bool { | ||||
| 			return a > b // i.e. largest at lowest idx | ||||
| 		}) | ||||
| 
 | ||||
| 		// Select random parameters in slice. | ||||
| 		minIdx := randRd.Intn(len(ids)) | ||||
| 		maxIdx := randRd.Intn(len(ids)) | ||||
| 
 | ||||
| 		// Select the boundaries. | ||||
| 		minID := ids[minIdx] | ||||
| 		maxID := ids[maxIdx] | ||||
| 
 | ||||
| 		// Create expected output. | ||||
| 		expect := slices.Clone(ids) | ||||
| 		expect = cutLower(expect, minID) | ||||
| 		expect = cutUpper(expect, maxID) | ||||
| 		expect = paging.Reverse(expect) | ||||
| 
 | ||||
| 		// Return page and expected IDs. | ||||
| 		return ids, &paging.Page{ | ||||
| 			Min:   paging.MinID(minID, ""), | ||||
| 			Max:   paging.MaxID(maxID), | ||||
| 			Limit: len(ids) * 2, | ||||
| 		}, expect | ||||
| 	}), | ||||
| 	CreateCase("sinceID and maxID set", func(ids []string) ([]string, *paging.Page, []string) { | ||||
| 		// Ensure input slice sorted descending for since_id | ||||
| 		slices.SortFunc(ids, func(a, b string) bool { | ||||
| 			return a < b // i.e. smallest at lowest idx | ||||
| 		}) | ||||
| 
 | ||||
| 		// Select random indices in slice. | ||||
| 		sinceIdx := randRd.Intn(len(ids)) | ||||
| 		maxIdx := randRd.Intn(len(ids)) | ||||
| 
 | ||||
| 		// Select the boundaries. | ||||
| 		sinceID := ids[sinceIdx] | ||||
| 		maxID := ids[maxIdx] | ||||
| 
 | ||||
| 		// Create expected output. | ||||
| 		expect := slices.Clone(ids) | ||||
| 		expect = cutLower(expect, maxID) | ||||
| 		expect = cutUpper(expect, sinceID) | ||||
| 
 | ||||
| 		// Return page and expected IDs. | ||||
| 		return ids, &paging.Page{ | ||||
| 			Min: paging.MinID("", sinceID), | ||||
| 			Max: paging.MaxID(maxID), | ||||
| 		}, expect | ||||
| 	}), | ||||
| 	CreateCase("maxID set", func(ids []string) ([]string, *paging.Page, []string) { | ||||
| 		// Ensure input slice sorted descending for max_id | ||||
| 		slices.SortFunc(ids, func(a, b string) bool { | ||||
| 			return a < b // i.e. smallest at lowest idx | ||||
| 		}) | ||||
| 
 | ||||
| 		// Select random indices in slice. | ||||
| 		maxIdx := randRd.Intn(len(ids)) | ||||
| 
 | ||||
| 		// Select the boundaries. | ||||
| 		maxID := ids[maxIdx] | ||||
| 
 | ||||
| 		// Create expected output. | ||||
| 		expect := slices.Clone(ids) | ||||
| 		expect = cutLower(expect, maxID) | ||||
| 
 | ||||
| 		// Return page and expected IDs. | ||||
| 		return ids, &paging.Page{ | ||||
| 			Max: paging.MaxID(maxID), | ||||
| 		}, expect | ||||
| 	}), | ||||
| 	CreateCase("sinceID set", func(ids []string) ([]string, *paging.Page, []string) { | ||||
| 		// Ensure input slice sorted descending for since_id | ||||
| 		slices.SortFunc(ids, func(a, b string) bool { | ||||
| 			return a < b | ||||
| 		}) | ||||
| 
 | ||||
| 		// Select random indices in slice. | ||||
| 		sinceIdx := randRd.Intn(len(ids)) | ||||
| 
 | ||||
| 		// Select the boundaries. | ||||
| 		sinceID := ids[sinceIdx] | ||||
| 
 | ||||
| 		// Create expected output. | ||||
| 		expect := slices.Clone(ids) | ||||
| 		expect = cutUpper(expect, sinceID) | ||||
| 
 | ||||
| 		// Return page and expected IDs. | ||||
| 		return ids, &paging.Page{ | ||||
| 			Min: paging.MinID("", sinceID), | ||||
| 		}, expect | ||||
| 	}), | ||||
| 	CreateCase("minID set", func(ids []string) ([]string, *paging.Page, []string) { | ||||
| 		// Ensure input slice sorted ascending for min_id | ||||
| 		slices.SortFunc(ids, func(a, b string) bool { | ||||
| 			return a > b // i.e. largest at lowest idx | ||||
| 		}) | ||||
| 
 | ||||
| 		// Select random indices in slice. | ||||
| 		minIdx := randRd.Intn(len(ids)) | ||||
| 
 | ||||
| 		// Select the boundaries. | ||||
| 		minID := ids[minIdx] | ||||
| 
 | ||||
| 		// Create expected output. | ||||
| 		expect := slices.Clone(ids) | ||||
| 		expect = cutLower(expect, minID) | ||||
| 		expect = paging.Reverse(expect) | ||||
| 
 | ||||
| 		// Return page and expected IDs. | ||||
| 		return ids, &paging.Page{ | ||||
| 			Min: paging.MinID(minID, ""), | ||||
| 		}, expect | ||||
| 	}), | ||||
| } | ||||
| 
 | ||||
| // cutLower cuts off the lower part of the slice from `bound` downwards. | ||||
| func cutLower(in []string, bound string) []string { | ||||
| 	for i := 0; i < len(in); i++ { | ||||
| 		if in[i] == bound { | ||||
| 			return in[i+1:] | ||||
| 		} | ||||
| 	} | ||||
| 	return in | ||||
| } | ||||
| 
 | ||||
| // cutUpper cuts off the upper part of the slice from `bound` onwards. | ||||
| func cutUpper(in []string, bound string) []string { | ||||
| 	for i := 0; i < len(in); i++ { | ||||
| 		if in[i] == bound { | ||||
| 			return in[:i] | ||||
| 		} | ||||
| 	} | ||||
| 	return in | ||||
| } | ||||
| 
 | ||||
| // generateSlice generates a new slice of len containing ascending sorted slice. | ||||
| func generateSlice(len int) []string { | ||||
| 	if len <= 0 { | ||||
| 		// minimum testable | ||||
| 		// pageable amount | ||||
| 		len = 2 | ||||
| 	} | ||||
| 	now := time.Now() | ||||
| 	in := make([]string, len) | ||||
| 	for i := 0; i < len; i++ { | ||||
| 		// Convert now to timestamp. | ||||
| 		t := ulid.Timestamp(now) | ||||
| 
 | ||||
| 		// Create anew ulid for now. | ||||
| 		u := ulid.MustNew(t, randRd) | ||||
| 
 | ||||
| 		// Add to slice. | ||||
| 		in[i] = u.String() | ||||
| 
 | ||||
| 		// Bump now by 1 second. | ||||
| 		now = now.Add(time.Second) | ||||
| 	} | ||||
| 	return in | ||||
| } | ||||
|  | @ -1,227 +0,0 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging | ||||
| 
 | ||||
| import "golang.org/x/exp/slices" | ||||
| 
 | ||||
| // Pager provides a means of paging serialized IDs, | ||||
| // using the terminology of our API endpoint queries. | ||||
| type Pager struct { | ||||
| 	// SinceID will limit the returned | ||||
| 	// page of IDs to contain newer than | ||||
| 	// since ID (excluding it). Result | ||||
| 	// will be returned DESCENDING. | ||||
| 	SinceID string | ||||
| 
 | ||||
| 	// MinID will limit the returned | ||||
| 	// page of IDs to contain newer than | ||||
| 	// min ID (excluding it). Result | ||||
| 	// will be returned ASCENDING. | ||||
| 	MinID string | ||||
| 
 | ||||
| 	// MaxID will limit the returned | ||||
| 	// page of IDs to contain older | ||||
| 	// than (excluding) this max ID. | ||||
| 	MaxID string | ||||
| 
 | ||||
| 	// Limit will limit the returned | ||||
| 	// page of IDs to at most 'limit'. | ||||
| 	Limit int | ||||
| } | ||||
| 
 | ||||
| // Page will page the given slice of GoToSocial IDs according | ||||
| // to the receiving Pager's SinceID, MinID, MaxID and Limits. | ||||
| // NOTE THE INPUT SLICE MUST BE SORTED IN ASCENDING ORDER | ||||
| // (I.E. OLDEST ITEMS AT LOWEST INDICES, NEWER AT HIGHER). | ||||
| func (p *Pager) PageAsc(ids []string) []string { | ||||
| 	if p == nil { | ||||
| 		// no paging. | ||||
| 		return ids | ||||
| 	} | ||||
| 
 | ||||
| 	var asc bool | ||||
| 
 | ||||
| 	if p.SinceID != "" { | ||||
| 		// If a sinceID is given, we | ||||
| 		// page down i.e. descending. | ||||
| 		asc = false | ||||
| 
 | ||||
| 		for i := 0; i < len(ids); i++ { | ||||
| 			if ids[i] == p.SinceID { | ||||
| 				// Hit the boundary. | ||||
| 				// Reslice to be: | ||||
| 				// "from here" | ||||
| 				ids = ids[i+1:] | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 	} else if p.MinID != "" { | ||||
| 		// We only support minID if | ||||
| 		// no sinceID is provided. | ||||
| 		// | ||||
| 		// If a minID is given, we | ||||
| 		// page up, i.e. ascending. | ||||
| 		asc = true | ||||
| 
 | ||||
| 		for i := 0; i < len(ids); i++ { | ||||
| 			if ids[i] == p.MinID { | ||||
| 				// Hit the boundary. | ||||
| 				// Reslice to be: | ||||
| 				// "from here" | ||||
| 				ids = ids[i+1:] | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if p.MaxID != "" { | ||||
| 		for i := 0; i < len(ids); i++ { | ||||
| 			if ids[i] == p.MaxID { | ||||
| 				// Hit the boundary. | ||||
| 				// Reslice to be: | ||||
| 				// "up to here" | ||||
| 				ids = ids[:i] | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if !asc && len(ids) > 1 { | ||||
| 		var ( | ||||
| 			// Start at front. | ||||
| 			i = 0 | ||||
| 
 | ||||
| 			// Start at back. | ||||
| 			j = len(ids) - 1 | ||||
| 		) | ||||
| 
 | ||||
| 		// Clone input IDs before | ||||
| 		// we perform modifications. | ||||
| 		ids = slices.Clone(ids) | ||||
| 
 | ||||
| 		for i < j { | ||||
| 			// Swap i,j index values in slice. | ||||
| 			ids[i], ids[j] = ids[j], ids[i] | ||||
| 
 | ||||
| 			// incr + decr, | ||||
| 			// looping until | ||||
| 			// they meet in | ||||
| 			// the middle. | ||||
| 			i++ | ||||
| 			j-- | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if p.Limit > 0 && p.Limit < len(ids) { | ||||
| 		// Reslice IDs to given limit. | ||||
| 		ids = ids[:p.Limit] | ||||
| 	} | ||||
| 
 | ||||
| 	return ids | ||||
| } | ||||
| 
 | ||||
| // Page will page the given slice of GoToSocial IDs according | ||||
| // to the receiving Pager's SinceID, MinID, MaxID and Limits. | ||||
| // NOTE THE INPUT SLICE MUST BE SORTED IN ASCENDING ORDER. | ||||
| // (I.E. NEWEST ITEMS AT LOWEST INDICES, OLDER AT HIGHER). | ||||
| func (p *Pager) PageDesc(ids []string) []string { | ||||
| 	if p == nil { | ||||
| 		// no paging. | ||||
| 		return ids | ||||
| 	} | ||||
| 
 | ||||
| 	var asc bool | ||||
| 
 | ||||
| 	if p.MaxID != "" { | ||||
| 		for i := 0; i < len(ids); i++ { | ||||
| 			if ids[i] == p.MaxID { | ||||
| 				// Hit the boundary. | ||||
| 				// Reslice to be: | ||||
| 				// "from here" | ||||
| 				ids = ids[i+1:] | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if p.SinceID != "" { | ||||
| 		// If a sinceID is given, we | ||||
| 		// page down i.e. descending. | ||||
| 		asc = false | ||||
| 
 | ||||
| 		for i := 0; i < len(ids); i++ { | ||||
| 			if ids[i] == p.SinceID { | ||||
| 				// Hit the boundary. | ||||
| 				// Reslice to be: | ||||
| 				// "up to here" | ||||
| 				ids = ids[:i] | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 	} else if p.MinID != "" { | ||||
| 		// We only support minID if | ||||
| 		// no sinceID is provided. | ||||
| 		// | ||||
| 		// If a minID is given, we | ||||
| 		// page up, i.e. ascending. | ||||
| 		asc = true | ||||
| 
 | ||||
| 		for i := 0; i < len(ids); i++ { | ||||
| 			if ids[i] == p.MinID { | ||||
| 				// Hit the boundary. | ||||
| 				// Reslice to be: | ||||
| 				// "up to here" | ||||
| 				ids = ids[:i] | ||||
| 				break | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if asc && len(ids) > 1 { | ||||
| 		var ( | ||||
| 			// Start at front. | ||||
| 			i = 0 | ||||
| 
 | ||||
| 			// Start at back. | ||||
| 			j = len(ids) - 1 | ||||
| 		) | ||||
| 
 | ||||
| 		// Clone input IDs before | ||||
| 		// we perform modifications. | ||||
| 		ids = slices.Clone(ids) | ||||
| 
 | ||||
| 		for i < j { | ||||
| 			// Swap i,j index values in slice. | ||||
| 			ids[i], ids[j] = ids[j], ids[i] | ||||
| 
 | ||||
| 			// incr + decr, | ||||
| 			// looping until | ||||
| 			// they meet in | ||||
| 			// the middle. | ||||
| 			i++ | ||||
| 			j-- | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if p.Limit > 0 && p.Limit < len(ids) { | ||||
| 		// Reslice IDs to given limit. | ||||
| 		ids = ids[:p.Limit] | ||||
| 	} | ||||
| 
 | ||||
| 	return ids | ||||
| } | ||||
|  | @ -1,171 +0,0 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging_test | ||||
| 
 | ||||
| import ( | ||||
| 	"testing" | ||||
| 
 | ||||
| 	"github.com/superseriousbusiness/gotosocial/internal/paging" | ||||
| 	"golang.org/x/exp/slices" | ||||
| ) | ||||
| 
 | ||||
| type Case struct { | ||||
| 	// Name is the test case name. | ||||
| 	Name string | ||||
| 
 | ||||
| 	// Input contains test case input ID slice. | ||||
| 	Input []string | ||||
| 
 | ||||
| 	// Expect contains expected test case output. | ||||
| 	Expect []string | ||||
| 
 | ||||
| 	// Page contains the paging function to use. | ||||
| 	Page func([]string) []string | ||||
| } | ||||
| 
 | ||||
| var cases = []Case{ | ||||
| 	{ | ||||
| 		Name: "min_id and max_id set", | ||||
| 		Input: []string{ | ||||
| 			"064Q5D7VG6TPPQ46T09MHJ96FW", | ||||
| 			"064Q5D7VGPTC4NK5T070VYSSF8", | ||||
| 			"064Q5D7VH5F0JXG6W5NCQ3JCWW", | ||||
| 			"064Q5D7VHMSW9DF3GCS088VAZC", | ||||
| 			"064Q5D7VJ073XG9ZTWHA2KHN10", | ||||
| 			"064Q5D7VJADJTPA3GW8WAX10TW", | ||||
| 			"064Q5D7VJMWXZD3S1KT7RD51N8", | ||||
| 			"064Q5D7VJYFBYSAH86KDBKZ6AC", | ||||
| 			"064Q5D7VK8H7WMJS399SHEPCB0", | ||||
| 			"064Q5D7VKG5EQ43TYP71B4K6K0", | ||||
| 		}, | ||||
| 		Expect: []string{ | ||||
| 			"064Q5D7VGPTC4NK5T070VYSSF8", | ||||
| 			"064Q5D7VH5F0JXG6W5NCQ3JCWW", | ||||
| 			"064Q5D7VHMSW9DF3GCS088VAZC", | ||||
| 			"064Q5D7VJ073XG9ZTWHA2KHN10", | ||||
| 			"064Q5D7VJADJTPA3GW8WAX10TW", | ||||
| 			"064Q5D7VJMWXZD3S1KT7RD51N8", | ||||
| 			"064Q5D7VJYFBYSAH86KDBKZ6AC", | ||||
| 			"064Q5D7VK8H7WMJS399SHEPCB0", | ||||
| 		}, | ||||
| 		Page: (&paging.Pager{ | ||||
| 			MinID: "064Q5D7VG6TPPQ46T09MHJ96FW", | ||||
| 			MaxID: "064Q5D7VKG5EQ43TYP71B4K6K0", | ||||
| 		}).PageAsc, | ||||
| 	}, | ||||
| 	{ | ||||
| 		Name: "min_id, max_id and limit set", | ||||
| 		Input: []string{ | ||||
| 			"064Q5D7VG6TPPQ46T09MHJ96FW", | ||||
| 			"064Q5D7VGPTC4NK5T070VYSSF8", | ||||
| 			"064Q5D7VH5F0JXG6W5NCQ3JCWW", | ||||
| 			"064Q5D7VHMSW9DF3GCS088VAZC", | ||||
| 			"064Q5D7VJ073XG9ZTWHA2KHN10", | ||||
| 			"064Q5D7VJADJTPA3GW8WAX10TW", | ||||
| 			"064Q5D7VJMWXZD3S1KT7RD51N8", | ||||
| 			"064Q5D7VJYFBYSAH86KDBKZ6AC", | ||||
| 			"064Q5D7VK8H7WMJS399SHEPCB0", | ||||
| 			"064Q5D7VKG5EQ43TYP71B4K6K0", | ||||
| 		}, | ||||
| 		Expect: []string{ | ||||
| 			"064Q5D7VGPTC4NK5T070VYSSF8", | ||||
| 			"064Q5D7VH5F0JXG6W5NCQ3JCWW", | ||||
| 			"064Q5D7VHMSW9DF3GCS088VAZC", | ||||
| 			"064Q5D7VJ073XG9ZTWHA2KHN10", | ||||
| 			"064Q5D7VJADJTPA3GW8WAX10TW", | ||||
| 		}, | ||||
| 		Page: (&paging.Pager{ | ||||
| 			MinID: "064Q5D7VG6TPPQ46T09MHJ96FW", | ||||
| 			MaxID: "064Q5D7VKG5EQ43TYP71B4K6K0", | ||||
| 			Limit: 5, | ||||
| 		}).PageAsc, | ||||
| 	}, | ||||
| 	{ | ||||
| 		Name: "min_id, max_id and too-large limit set", | ||||
| 		Input: []string{ | ||||
| 			"064Q5D7VG6TPPQ46T09MHJ96FW", | ||||
| 			"064Q5D7VGPTC4NK5T070VYSSF8", | ||||
| 			"064Q5D7VH5F0JXG6W5NCQ3JCWW", | ||||
| 			"064Q5D7VHMSW9DF3GCS088VAZC", | ||||
| 			"064Q5D7VJ073XG9ZTWHA2KHN10", | ||||
| 			"064Q5D7VJADJTPA3GW8WAX10TW", | ||||
| 			"064Q5D7VJMWXZD3S1KT7RD51N8", | ||||
| 			"064Q5D7VJYFBYSAH86KDBKZ6AC", | ||||
| 			"064Q5D7VK8H7WMJS399SHEPCB0", | ||||
| 			"064Q5D7VKG5EQ43TYP71B4K6K0", | ||||
| 		}, | ||||
| 		Expect: []string{ | ||||
| 			"064Q5D7VGPTC4NK5T070VYSSF8", | ||||
| 			"064Q5D7VH5F0JXG6W5NCQ3JCWW", | ||||
| 			"064Q5D7VHMSW9DF3GCS088VAZC", | ||||
| 			"064Q5D7VJ073XG9ZTWHA2KHN10", | ||||
| 			"064Q5D7VJADJTPA3GW8WAX10TW", | ||||
| 			"064Q5D7VJMWXZD3S1KT7RD51N8", | ||||
| 			"064Q5D7VJYFBYSAH86KDBKZ6AC", | ||||
| 			"064Q5D7VK8H7WMJS399SHEPCB0", | ||||
| 		}, | ||||
| 		Page: (&paging.Pager{ | ||||
| 			MinID: "064Q5D7VG6TPPQ46T09MHJ96FW", | ||||
| 			MaxID: "064Q5D7VKG5EQ43TYP71B4K6K0", | ||||
| 			Limit: 100, | ||||
| 		}).PageAsc, | ||||
| 	}, | ||||
| 	{ | ||||
| 		Name: "since_id and max_id set", | ||||
| 		Input: []string{ | ||||
| 			"064Q5D7VG6TPPQ46T09MHJ96FW", | ||||
| 			"064Q5D7VGPTC4NK5T070VYSSF8", | ||||
| 			"064Q5D7VH5F0JXG6W5NCQ3JCWW", | ||||
| 			"064Q5D7VHMSW9DF3GCS088VAZC", | ||||
| 			"064Q5D7VJ073XG9ZTWHA2KHN10", | ||||
| 			"064Q5D7VJADJTPA3GW8WAX10TW", | ||||
| 			"064Q5D7VJMWXZD3S1KT7RD51N8", | ||||
| 			"064Q5D7VJYFBYSAH86KDBKZ6AC", | ||||
| 			"064Q5D7VK8H7WMJS399SHEPCB0", | ||||
| 			"064Q5D7VKG5EQ43TYP71B4K6K0", | ||||
| 		}, | ||||
| 		Expect: []string{ | ||||
| 			"064Q5D7VK8H7WMJS399SHEPCB0", | ||||
| 			"064Q5D7VJYFBYSAH86KDBKZ6AC", | ||||
| 			"064Q5D7VJMWXZD3S1KT7RD51N8", | ||||
| 			"064Q5D7VJADJTPA3GW8WAX10TW", | ||||
| 			"064Q5D7VJ073XG9ZTWHA2KHN10", | ||||
| 			"064Q5D7VHMSW9DF3GCS088VAZC", | ||||
| 			"064Q5D7VH5F0JXG6W5NCQ3JCWW", | ||||
| 			"064Q5D7VGPTC4NK5T070VYSSF8", | ||||
| 		}, | ||||
| 		Page: (&paging.Pager{ | ||||
| 			SinceID: "064Q5D7VG6TPPQ46T09MHJ96FW", | ||||
| 			MaxID:   "064Q5D7VKG5EQ43TYP71B4K6K0", | ||||
| 		}).PageAsc, | ||||
| 	}, | ||||
| } | ||||
| 
 | ||||
| func TestPage(t *testing.T) { | ||||
| 	for _, c := range cases { | ||||
| 		t.Run(c.Name, func(t *testing.T) { | ||||
| 			// Page the input slice. | ||||
| 			out := c.Page(c.Input) | ||||
| 
 | ||||
| 			// Check paged output is as expected. | ||||
| 			if !slices.Equal(out, c.Expect) { | ||||
| 				t.Errorf("\nreceived=%v\nexpect%v\n", out, c.Expect) | ||||
| 			} | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
							
								
								
									
internal/paging/parse.go (111 changes, Normal file)
							|  | @ -0,0 +1,111 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging | ||||
| 
 | ||||
| import ( | ||||
| 	"strconv" | ||||
| 
 | ||||
| 	"github.com/gin-gonic/gin" | ||||
| 	"github.com/superseriousbusiness/gotosocial/internal/gtserror" | ||||
| ) | ||||
| 
 | ||||
| // ParseIDPage parses an ID Page from a request context, returning BadRequest on error parsing. | ||||
| // The min, max and default parameters define the page size limit minimum, maximum and default | ||||
| // value, where a non-zero default will enforce paging for the endpoint on which this is called. | ||||
| // While conversely, a zero default limit will not enforce paging, returning a nil page value. | ||||
| func ParseIDPage(c *gin.Context, min, max, _default int) (*Page, gtserror.WithCode) { | ||||
| 	// Extract request query params. | ||||
| 	sinceID := c.Query("since_id") | ||||
| 	minID := c.Query("min_id") | ||||
| 	maxID := c.Query("max_id") | ||||
| 
 | ||||
| 	// Extract request limit parameter. | ||||
| 	limit, errWithCode := ParseLimit(c, min, max, _default) | ||||
| 	if errWithCode != nil { | ||||
| 		return nil, errWithCode | ||||
| 	} | ||||
| 
 | ||||
| 	if sinceID == "" && | ||||
| 		minID == "" && | ||||
| 		maxID == "" && | ||||
| 		limit == 0 { | ||||
| 		// No ID paging params provided, and no default | ||||
| 		// limit value which indicates paging not enforced. | ||||
| 		return nil, nil | ||||
| 	} | ||||
| 
 | ||||
| 	return &Page{ | ||||
| 		Min:   MinID(minID, sinceID), | ||||
| 		Max:   MaxID(maxID), | ||||
| 		Limit: limit, | ||||
| 	}, nil | ||||
| } | ||||
| 
 | ||||
| // ParseShortcodeDomainPage parses an emoji shortcode domain Page from a request context, returning BadRequest | ||||
| // on error parsing. The min, max and default parameters define the page size limit minimum, maximum and default | ||||
| // value where a non-zero default will enforce paging for the endpoint on which this is called. While conversely, | ||||
| // a zero default limit will not enforce paging, returning a nil page value. | ||||
| func ParseShortcodeDomainPage(c *gin.Context, min, max, _default int) (*Page, gtserror.WithCode) { | ||||
| 	// Extract request query parameters. | ||||
| 	minShortcode := c.Query("min_shortcode_domain") | ||||
| 	maxShortcode := c.Query("max_shortcode_domain") | ||||
| 
 | ||||
| 	// Extract request limit parameter. | ||||
| 	limit, errWithCode := ParseLimit(c, min, max, _default) | ||||
| 	if errWithCode != nil { | ||||
| 		return nil, errWithCode | ||||
| 	} | ||||
| 
 | ||||
| 	if minShortcode == "" && | ||||
| 		maxShortcode == "" && | ||||
| 		limit == 0 { | ||||
| 		// No ID paging params provided, and no default | ||||
| 		// limit value which indicates paging not enforced. | ||||
| 		return nil, nil | ||||
| 	} | ||||
| 
 | ||||
| 	return &Page{ | ||||
| 		Min:   MinShortcodeDomain(minShortcode), | ||||
| 		Max:   MaxShortcodeDomain(maxShortcode), | ||||
| 		Limit: limit, | ||||
| 	}, nil | ||||
| } | ||||
| 
 | ||||
| // ParseLimit parses the limit query parameter from a request context, returning BadRequest on error parsing and _default if zero limit given. | ||||
| func ParseLimit(c *gin.Context, min, max, _default int) (int, gtserror.WithCode) { | ||||
| 	// Get limit query param. | ||||
| 	str := c.Query("limit") | ||||
| 
 | ||||
| 	// Attempt to parse limit int. | ||||
| 	i, err := strconv.Atoi(str) | ||||
| 	if err != nil { | ||||
| 		const help = "bad integer limit value" | ||||
| 		return 0, gtserror.NewErrorBadRequest(err, help) | ||||
| 	} | ||||
| 
 | ||||
| 	switch { | ||||
| 	case i == 0: | ||||
| 		return _default, nil | ||||
| 	case i < min: | ||||
| 		return min, nil | ||||
| 	case i > max: | ||||
| 		return max, nil | ||||
| 	default: | ||||
| 		return i, nil | ||||
| 	} | ||||
| } | ||||
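The min, max and default arguments act as a clamp plus a fallback: values outside the bounds are pinned to them, and an explicit limit=0 selects the default. A test-style sketch of that behaviour (the bounds, query strings and path are made up, and error handling is elided):

    package main

    import (
        "fmt"
        "net/http/httptest"

        "github.com/gin-gonic/gin"
        "github.com/superseriousbusiness/gotosocial/internal/paging"
    )

    // ctxWithQuery builds a throwaway gin context whose request carries
    // the given query string, which is all the parse functions read.
    func ctxWithQuery(query string) *gin.Context {
        c, _ := gin.CreateTestContext(httptest.NewRecorder())
        c.Request = httptest.NewRequest("GET", "/api/v1/blocks?"+query, nil)
        return c
    }

    func main() {
        // Values above max (or below min) are clamped to the bound.
        limit, _ := paging.ParseLimit(ctxWithQuery("limit=500"), 1, 100, 20)
        fmt.Println(limit) // 100

        // An explicit zero falls back to the default.
        limit, _ = paging.ParseLimit(ctxWithQuery("limit=0"), 1, 100, 20)
        fmt.Println(limit) // 20

        // ParseIDPage bundles the ID params and the parsed limit into a *Page.
        page, _ := paging.ParseIDPage(ctxWithQuery("max_id=01EXAMPLE&limit=40"), 1, 100, 20)
        fmt.Println(page.GetMax(), page.GetLimit()) // 01EXAMPLE 40
    }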
							
								
								
									
internal/paging/response.go (91 changes, Normal file)
							|  | @ -0,0 +1,91 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging | ||||
| 
 | ||||
| import ( | ||||
| 	"strings" | ||||
| 
 | ||||
| 	apimodel "github.com/superseriousbusiness/gotosocial/internal/api/model" | ||||
| 	"github.com/superseriousbusiness/gotosocial/internal/config" | ||||
| ) | ||||
| 
 | ||||
| // ResponseParams models the parameters to pass to PageableResponse. | ||||
| // | ||||
| // The given items will be provided in the paged response. | ||||
| // | ||||
| // The other values are all used to create the Link header so that callers know | ||||
| // which endpoint to query next and previously in order to do paging. | ||||
| type ResponseParams struct { | ||||
| 	Items []interface{} // Sorted slice of items (statuses, notifications, etc) | ||||
| 	Path  string        // path to use for next/prev queries in the link header | ||||
| 	Next  *Page         // page details for the next page | ||||
| 	Prev  *Page         // page details for the previous page | ||||
| 	Query []string      // any extra query parameters to provide in the link header, should be in the format 'example=value' | ||||
| } | ||||
| 
 | ||||
| // PackageResponse is a convenience function for returning | ||||
| // a bunch of pageable items (notifications, statuses, etc), as well | ||||
| // as a Link header to inform callers of where to find next/prev items. | ||||
| func PackageResponse(params ResponseParams) *apimodel.PageableResponse { | ||||
| 	if len(params.Items) == 0 { | ||||
| 		// No items to page through. | ||||
| 		return EmptyResponse() | ||||
| 	} | ||||
| 
 | ||||
| 	var ( | ||||
| 		// Extract paging params. | ||||
| 		nextPg = params.Next | ||||
| 		prevPg = params.Prev | ||||
| 
 | ||||
| 		// Host app configuration. | ||||
| 		proto = config.GetProtocol() | ||||
| 		host  = config.GetHost() | ||||
| 
 | ||||
| 		// Combined next/prev page link header parts. | ||||
| 		linkHeaderParts = make([]string, 0, 2) | ||||
| 	) | ||||
| 
 | ||||
| 	// Build the next / previous page links from page and host config. | ||||
| 	nextLink := nextPg.ToLink(proto, host, params.Path, params.Query) | ||||
| 	prevLink := prevPg.ToLink(proto, host, params.Path, params.Query) | ||||
| 
 | ||||
| 	if nextLink != "" { | ||||
| 		// Append page "next" link to header parts. | ||||
| 		linkHeaderParts = append(linkHeaderParts, `<`+nextLink+`>; rel="next"`) | ||||
| 	} | ||||
| 
 | ||||
| 	if prevLink != "" { | ||||
| 		// Append page "prev" link to header parts. | ||||
| 		linkHeaderParts = append(linkHeaderParts, `<`+prevLink+`>; rel="prev"`) | ||||
| 	} | ||||
| 
 | ||||
| 	return &apimodel.PageableResponse{ | ||||
| 		Items:      params.Items, | ||||
| 		NextLink:   nextLink, | ||||
| 		PrevLink:   prevLink, | ||||
| 		LinkHeader: strings.Join(linkHeaderParts, ", "), | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| // EmptyResponse just returns an empty | ||||
| // PageableResponse with no link header or items. | ||||
| func EmptyResponse() *apimodel.PageableResponse { | ||||
| 	return &apimodel.PageableResponse{ | ||||
| 		Items: []interface{}{}, | ||||
| 	} | ||||
| } | ||||
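In the processors, the page parsed from the request is reused for the response: Next and Prev derive follow-up pages from the oldest and newest returned IDs, and PackageResponse renders them into the Link header. A standalone sketch; the items, IDs, path and host are made up, config.SetHost is what the tests below use, and config.SetProtocol is assumed to exist alongside the config.GetProtocol call above.

    package main

    import (
        "fmt"

        "github.com/superseriousbusiness/gotosocial/internal/config"
        "github.com/superseriousbusiness/gotosocial/internal/paging"
    )

    func main() {
        // PackageResponse reads scheme and host from instance config.
        config.SetProtocol("https") // assumed setter, mirrors config.SetHost
        config.SetHost("example.org")

        // The page parsed from the incoming request (a plain ?limit=2).
        page := &paging.Page{
            Min:   paging.MinID("", ""),
            Max:   paging.MaxID(""),
            Limit: 2,
        }

        // Two returned items, newest first; "03" and "02" stand in for their IDs.
        items := []interface{}{"item 03", "item 02"}

        resp := paging.PackageResponse(paging.ResponseParams{
            Items: items,
            Path:  "/api/v1/blocks",
            Next:  page.Next("02"), // oldest ID on this page
            Prev:  page.Prev("03"), // newest ID on this page
        })

        // <https://example.org/api/v1/blocks?max_id=02&limit=2>; rel="next", <https://example.org/api/v1/blocks?since_id=03&limit=2>; rel="prev"
        fmt.Println(resp.LinkHeader)
    }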
							
								
								
									
internal/paging/response_test.go (134 changes, Normal file)
							|  | @ -0,0 +1,134 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging_test | ||||
| 
 | ||||
| import ( | ||||
| 	"testing" | ||||
| 
 | ||||
| 	"github.com/stretchr/testify/suite" | ||||
| 	"github.com/superseriousbusiness/gotosocial/internal/config" | ||||
| 	"github.com/superseriousbusiness/gotosocial/internal/paging" | ||||
| ) | ||||
| 
 | ||||
| type PagingSuite struct { | ||||
| 	suite.Suite | ||||
| } | ||||
| 
 | ||||
| func (suite *PagingSuite) TestPagingStandard() { | ||||
| 	config.SetHost("example.org") | ||||
| 
 | ||||
| 	params := paging.ResponseParams{ | ||||
| 		Items: make([]interface{}, 10, 10), | ||||
| 		Path:  "/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses", | ||||
| 		Next:  nextPage("01H11KA1DM2VH3747YDE7FV5HN", 10), | ||||
| 		Prev:  prevPage("01H11KBBVRRDYYC5KEPME1NP5R", 10), | ||||
| 	} | ||||
| 
 | ||||
| 	resp := paging.PackageResponse(params) | ||||
| 
 | ||||
| 	suite.Equal(make([]interface{}, 10, 10), resp.Items) | ||||
| 	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN&limit=10>; rel="next", <https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R&limit=10>; rel="prev"`, resp.LinkHeader) | ||||
| 	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN&limit=10`, resp.NextLink) | ||||
| 	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R&limit=10`, resp.PrevLink) | ||||
| } | ||||
| 
 | ||||
| func (suite *PagingSuite) TestPagingNoLimit() { | ||||
| 	config.SetHost("example.org") | ||||
| 
 | ||||
| 	params := paging.ResponseParams{ | ||||
| 		Items: make([]interface{}, 10, 10), | ||||
| 		Path:  "/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses", | ||||
| 		Next:  nextPage("01H11KA1DM2VH3747YDE7FV5HN", 0), | ||||
| 		Prev:  prevPage("01H11KBBVRRDYYC5KEPME1NP5R", 0), | ||||
| 	} | ||||
| 
 | ||||
| 	resp := paging.PackageResponse(params) | ||||
| 
 | ||||
| 	suite.Equal(make([]interface{}, 10, 10), resp.Items) | ||||
| 	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN>; rel="next", <https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R>; rel="prev"`, resp.LinkHeader) | ||||
| 	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN`, resp.NextLink) | ||||
| 	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R`, resp.PrevLink) | ||||
| } | ||||
| 
 | ||||
| func (suite *PagingSuite) TestPagingNoNextID() { | ||||
| 	config.SetHost("example.org") | ||||
| 
 | ||||
| 	params := paging.ResponseParams{ | ||||
| 		Items: make([]interface{}, 10, 10), | ||||
| 		Path:  "/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses", | ||||
| 		Prev:  prevPage("01H11KBBVRRDYYC5KEPME1NP5R", 10), | ||||
| 	} | ||||
| 
 | ||||
| 	resp := paging.PackageResponse(params) | ||||
| 
 | ||||
| 	suite.Equal(make([]interface{}, 10, 10), resp.Items) | ||||
| 	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R&limit=10>; rel="prev"`, resp.LinkHeader) | ||||
| 	suite.Equal(``, resp.NextLink) | ||||
| 	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?min_id=01H11KBBVRRDYYC5KEPME1NP5R&limit=10`, resp.PrevLink) | ||||
| } | ||||
| 
 | ||||
| func (suite *PagingSuite) TestPagingNoPrevID() { | ||||
| 	config.SetHost("example.org") | ||||
| 
 | ||||
| 	params := paging.ResponseParams{ | ||||
| 		Items: make([]interface{}, 10, 10), | ||||
| 		Path:  "/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses", | ||||
| 		Next:  nextPage("01H11KA1DM2VH3747YDE7FV5HN", 10), | ||||
| 	} | ||||
| 
 | ||||
| 	resp := paging.PackageResponse(params) | ||||
| 
 | ||||
| 	suite.Equal(make([]interface{}, 10, 10), resp.Items) | ||||
| 	suite.Equal(`<https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN&limit=10>; rel="next"`, resp.LinkHeader) | ||||
| 	suite.Equal(`https://example.org/api/v1/accounts/01H11KA68PM4NNYJEG0FJQ90R3/statuses?max_id=01H11KA1DM2VH3747YDE7FV5HN&limit=10`, resp.NextLink) | ||||
| 	suite.Equal(``, resp.PrevLink) | ||||
| } | ||||
| 
 | ||||
| func (suite *PagingSuite) TestPagingNoItems() { | ||||
| 	config.SetHost("example.org") | ||||
| 
 | ||||
| 	params := paging.ResponseParams{ | ||||
| 		Next: nextPage("01H11KA1DM2VH3747YDE7FV5HN", 10), | ||||
| 		Prev: prevPage("01H11KBBVRRDYYC5KEPME1NP5R", 10), | ||||
| 	} | ||||
| 
 | ||||
| 	resp := paging.PackageResponse(params) | ||||
| 
 | ||||
| 	suite.Empty(resp.Items) | ||||
| 	suite.Empty(resp.LinkHeader) | ||||
| 	suite.Empty(resp.NextLink) | ||||
| 	suite.Empty(resp.PrevLink) | ||||
| } | ||||
| 
 | ||||
| func TestPagingSuite(t *testing.T) { | ||||
| 	suite.Run(t, &PagingSuite{}) | ||||
| } | ||||
| 
 | ||||
| func nextPage(id string, limit int) *paging.Page { | ||||
| 	return &paging.Page{ | ||||
| 		Max:   paging.MaxID(id), | ||||
| 		Limit: limit, | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| func prevPage(id string, limit int) *paging.Page { | ||||
| 	return &paging.Page{ | ||||
| 		Min:   paging.MinID(id, ""), | ||||
| 		Limit: limit, | ||||
| 	} | ||||
| } | ||||
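For context on consuming the Link headers asserted above, here is a rough, generic sketch of pulling out the rel="next" target; it is not part of this change and not how GoToSocial itself parses such headers.

package example

import "strings"

// nextFromLinkHeader extracts the rel="next" URL from a Link header of the
// shape asserted in the tests above, e.g.
// `<https://example.org/...?max_id=...&limit=10>; rel="next", <...>; rel="prev"`.
// Illustrative only; real clients should use a proper Link-header parser.
func nextFromLinkHeader(link string) string {
	for _, part := range strings.Split(link, ",") {
		part = strings.TrimSpace(part)
		if !strings.Contains(part, `rel="next"`) {
			continue
		}
		start := strings.Index(part, "<")
		end := strings.Index(part, ">")
		if start == -1 || end <= start {
			continue
		}
		return part[start+1 : end]
	}
	return ""
}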
49  internal/paging/util.go  Normal file
							|  | @ -0,0 +1,49 @@ | |||
| // GoToSocial | ||||
| // Copyright (C) GoToSocial Authors admin@gotosocial.org | ||||
| // SPDX-License-Identifier: AGPL-3.0-or-later | ||||
| // | ||||
| // This program is free software: you can redistribute it and/or modify | ||||
| // it under the terms of the GNU Affero General Public License as published by | ||||
| // the Free Software Foundation, either version 3 of the License, or | ||||
| // (at your option) any later version. | ||||
| // | ||||
| // This program is distributed in the hope that it will be useful, | ||||
| // but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||
| // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||
| // GNU Affero General Public License for more details. | ||||
| // | ||||
| // You should have received a copy of the GNU Affero General Public License | ||||
| // along with this program.  If not, see <http://www.gnu.org/licenses/>. | ||||
| 
 | ||||
| package paging | ||||
| 
 | ||||
| // Reverse will reverse the given input slice. | ||||
| func Reverse(in []string) []string { | ||||
| 	var ( | ||||
| 		// Start at front. | ||||
| 		i = 0 | ||||
| 
 | ||||
| 		// Start at back. | ||||
| 		j = len(in) - 1 | ||||
| 	) | ||||
| 
 | ||||
| 	for i < j { | ||||
| 		// Swap i,j index values in slice. | ||||
| 		in[i], in[j] = in[j], in[i] | ||||
| 
 | ||||
| 		// incr + decr, | ||||
| 		// looping until | ||||
| 		// they meet in | ||||
| 		// the middle. | ||||
| 		i++ | ||||
| 		j-- | ||||
| 	} | ||||
| 
 | ||||
| 	return in | ||||
| } | ||||
| 
 | ||||
| // zero is a shorthand to check whether a generic value is its zero value. | ||||
| func zero[T comparable](t T) bool { | ||||
| 	var z T | ||||
| 	return t == z | ||||
| } | ||||
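An illustrative in-package sketch (not part of this change) of how both helpers behave: Reverse flips the slice in place and returns the same slice, while zero reports whether a comparable value equals its type's zero value.

package paging

import "testing"

// TestUtilSketch is illustrative only and not part of this change.
func TestUtilSketch(t *testing.T) {
	ids := []string{"01AAA", "01BBB", "01CCC"}

	// Reverse mutates in place and returns the same slice.
	out := Reverse(ids)
	if out[0] != "01CCC" || ids[0] != "01CCC" {
		t.Errorf("unexpected order after Reverse: %v", out)
	}

	// zero is true only for a type's zero value.
	if !zero("") || zero("x") || !zero(0) || zero(1) {
		t.Error("unexpected zero() results")
	}
}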
|  | @ -34,11 +34,11 @@ import ( | |||
| func (p *Processor) BlocksGet( | ||||
| 	ctx context.Context, | ||||
| 	requestingAccount *gtsmodel.Account, | ||||
| 	page paging.Pager, | ||||
| 	page *paging.Page, | ||||
| ) (*apimodel.PageableResponse, gtserror.WithCode) { | ||||
| 	blocks, err := p.state.DB.GetAccountBlocks(ctx, | ||||
| 		requestingAccount.ID, | ||||
| 		&page, | ||||
| 		page, | ||||
| 	) | ||||
| 	if err != nil && !errors.Is(err, db.ErrNoEntries) { | ||||
| 		return nil, gtserror.NewErrorInternalError(err) | ||||
|  | @ -77,13 +77,10 @@ func (p *Processor) BlocksGet( | |||
| 		items = append(items, account) | ||||
| 	} | ||||
| 
 | ||||
| 	return util.PackagePageableResponse(util.PageableResponseParams{ | ||||
| 	return paging.PackageResponse(paging.ResponseParams{ | ||||
| 		Items: items, | ||||
| 		Path:  "/api/v1/blocks", | ||||
| 		NextMaxIDKey:   "max_id", | ||||
| 		PrevMinIDKey:   "since_id", | ||||
| 		NextMaxIDValue: nextMaxIDValue, | ||||
| 		PrevMinIDValue: prevMinIDValue, | ||||
| 		Limit:          page.Limit, | ||||
| 	}) | ||||
| 		Next:  page.Next(nextMaxIDValue), | ||||
| 		Prev:  page.Prev(prevMinIDValue), | ||||
| 	}), nil | ||||
| } | ||||
|  |  | |||
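This hunk does not show how nextMaxIDValue and prevMinIDValue are produced; a common (assumed) derivation, sketched below, is to take them from the last and first returned item IDs when results are ordered newest-first, then hand them to page.Next / page.Prev as above.

package example

// boundaryIDs is an assumed sketch only: with newest-first ordering,
// "next" continues from the last returned ID and "prev" from the first.
// The actual derivation in this processor is not shown in this hunk.
func boundaryIDs(ids []string) (nextMaxID string, prevMinID string) {
	if len(ids) == 0 {
		// Nothing returned: no paging boundaries.
		return "", ""
	}
	return ids[len(ids)-1], ids[0]
}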