mirror of https://codeberg.org/yeentown/barkey.git (synced 2025-11-04 07:24:13 +00:00)
	rework pagination - fixes #491
Previously, when adding items to a paginated view either at the beginning (e.g. new notes coming in while we're not looking at the top of the timeline) or at the end (e.g. more items arriving from a background fetch), the resulting list got truncated to `displayLimit`, potentially throwing data away and causing a new fetch. Coupled with the async nature of scrolling & fetching, this could cause weird results.

Also, `offset` was always incremented by the size of the fetched results, even if not all of them were displayed, which meant that offset-based pagination could drop items.

Finally, the "queue" of new items (usually, new notes) also got truncated to `displayLimit`, which again could drop items. This effect was usually masked by the first point: when scrolling to the top of the timeline, if the queue's length was equal to `displayLimit`, those notes displaced any existing ones, `unshiftItems` set `more.value = true`, you got scrolled to the top, and notes were fetched again, so you lost your position but at least all notes got shown, eventually.
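A minimal sketch of the first problem, using a plain Map and a hypothetical `unshiftItemsOld` helper (names and data are illustrative stand-ins, not the component's actual code):

// Illustrative TypeScript sketch of the old truncation behaviour.
// `Entity`, `displayLimit`, and `unshiftItemsOld` are stand-ins.
type Entity = { id: string };

const displayLimit = 3;
let items = new Map<string, Entity>([
	['a', { id: 'a' }], ['b', { id: 'b' }], ['c', { id: 'c' }],
]);

function unshiftItemsOld(newItems: Entity[]) {
	// old behaviour: always slice the merged list to displayLimit,
	// silently dropping whatever falls off the end
	const entries = newItems.map(i => [i.id, i] as [string, Entity]);
	items = new Map([...entries, ...items].slice(0, displayLimit));
}

unshiftItemsOld([{ id: 'x' }]);
console.log([...items.keys()]); // ['x', 'a', 'b']: 'c' was silently dropped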
This commit is contained in:

parent e0afeff248
commit dbfafe25e3

1 changed file with 9 additions and 9 deletions
@@ -395,10 +395,10 @@ const prepend = (item: MisskeyEntity): void => {
  * @param newItems array of new items
  */
 function unshiftItems(newItems: MisskeyEntity[]) {
-	const length = newItems.length + items.value.size;
-	items.value = new Map([...arrayToEntries(newItems), ...items.value].slice(0, props.displayLimit));
-
-	if (length >= props.displayLimit) more.value = true;
+	const prevLength = items.value.size;
+	items.value = new Map([...arrayToEntries(newItems), ...items.value].slice(0, newItems.length + props.displayLimit));
+	// if we truncated, mark that there are more values to fetch
+	if (items.value.size < prevLength) more.value = true;
 }
 
 /**
@@ -406,10 +406,10 @@ function unshiftItems(newItems: MisskeyEntity[]) {
  * @param oldItems array of old items
  */
 function concatItems(oldItems: MisskeyEntity[]) {
-	const length = oldItems.length + items.value.size;
-	items.value = new Map([...items.value, ...arrayToEntries(oldItems)].slice(0, props.displayLimit));
-
-	if (length >= props.displayLimit) more.value = true;
+	const prevLength = items.value.size;
+	items.value = new Map([...items.value, ...arrayToEntries(oldItems)].slice(0, oldItems.length + props.displayLimit));
+	// if we truncated, mark that there are more values to fetch
+	if (items.value.size < prevLength) more.value = true;
 }
 
 function executeQueue() {
@@ -418,7 +418,7 @@ function executeQueue() {
 }
 
 function prependQueue(newItem: MisskeyEntity) {
-	queue.value = new Map([[newItem.id, newItem], ...queue.value].slice(0, props.displayLimit) as [string, MisskeyEntity][]);
+	queue.value = new Map([[newItem.id, newItem], ...queue.value] as [string, MisskeyEntity][]);
 }
 
 /*
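For contrast, a self-contained sketch of the two slice bounds (plain Maps stand in for the component's reactive refs; the data is made up):

// Old bound vs. new bound, with plain Maps in place of the reactive refs.
type Entity = { id: string };

const displayLimit = 2;
const existing: [string, Entity][] = [['a', { id: 'a' }], ['b', { id: 'b' }]];
const incoming: [string, Entity][] = [['x', { id: 'x' }], ['y', { id: 'y' }]];

// old: slice to displayLimit, so prepending two items evicts both existing ones
const oldResult = new Map([...incoming, ...existing].slice(0, displayLimit));
// new: leave room for every incoming item plus displayLimit existing ones
const newResult = new Map([...incoming, ...existing].slice(0, incoming.length + displayLimit));

console.log(oldResult.size); // 2, 'a' and 'b' were thrown away
console.log(newResult.size); // 4, nothing dropped

With the new bound, truncation can only evict pre-existing items beyond `displayLimit`, never the newly arrived ones.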