@@ -70,7 +70,6 @@ export class DataAccessRead implements DataAccessTypes.IDataRead {
7070 }
7171
7272 const pending = this . pendingStore ?. findByTopics ( topics ) || [ ] ;
73-
7473 const pendingItems = pending . map ( ( item ) => ( {
7574 hash : item . storageResult . id ,
7675 channelId : item . channelId ,
@@ -113,25 +112,33 @@ export class DataAccessRead implements DataAccessTypes.IDataRead {
113112
114113 const transactions = result . transactions . concat ( ...pendingItems ) ;
115114
116- // list of channels having at least one tx updated during the updatedBetween boundaries
117- const channels = (
118- updatedBetween
119- ? transactions . filter (
120- ( tx ) =>
121- tx . blockTimestamp >= ( updatedBetween . from || 0 ) &&
122- tx . blockTimestamp <= ( updatedBetween . to || Number . MAX_SAFE_INTEGER ) ,
123- )
124- : transactions
125- ) . map ( ( x ) => x . channelId ) ;
115+ // Apply timestamp filtering FIRST
116+ const filteredTransactions = updatedBetween
117+ ? transactions . filter (
118+ ( tx ) =>
119+ tx . blockTimestamp >= ( updatedBetween . from || 0 ) &&
120+ tx . blockTimestamp <= ( updatedBetween . to || Number . MAX_SAFE_INTEGER ) ,
121+ )
122+ : transactions ;
123+
124+ // Then get unique channels from filtered transactions
125+ const channels = [ ...new Set ( filteredTransactions . map ( ( x ) => x . channelId ) ) ] ;
126126
127+ // Get all transactions for these channels
127128 const filteredTxs = transactions . filter ( ( tx ) => channels . includes ( tx . channelId ) ) ;
129+
130+ // Apply pagination to the filtered results
131+ const start = ( ( page || 1 ) - 1 ) * ( pageSize || filteredTxs . length ) ;
132+ const end = start + ( pageSize || filteredTxs . length ) ;
133+ const paginatedTxs = filteredTxs . slice ( start , end ) ;
134+
128135 return {
129136 meta : {
130- storageMeta : filteredTxs . reduce ( ( acc , tx ) => {
137+ storageMeta : paginatedTxs . reduce ( ( acc , tx ) => {
131138 acc [ tx . channelId ] = [ this . toStorageMeta ( tx , result . blockNumber , this . network ) ] ;
132139 return acc ;
133140 } , { } as Record < string , StorageTypes . IEntryMetadata [ ] > ) ,
134- transactionsStorageLocation : filteredTxs . reduce ( ( prev , curr ) => {
141+ transactionsStorageLocation : paginatedTxs . reduce ( ( prev , curr ) => {
135142 if ( ! prev [ curr . channelId ] ) {
136143 prev [ curr . channelId ] = [ ] ;
137144 }
@@ -141,17 +148,15 @@ export class DataAccessRead implements DataAccessTypes.IDataRead {
141148 pagination :
142149 page && pageSize
143150 ? {
144- total : result . transactions . length + pendingItems . length ,
151+ total : filteredTxs . length ,
145152 page,
146153 pageSize,
147- hasMore :
148- ( page - 1 ) * pageSize + filteredTxs . length - pendingItemsOnCurrentPage <
149- result . transactions . length ,
154+ hasMore : end < filteredTxs . length ,
150155 }
151156 : undefined ,
152157 } ,
153158 result : {
154- transactions : filteredTxs . reduce ( ( prev , curr ) => {
159+ transactions : paginatedTxs . reduce ( ( prev , curr ) => {
155160 if ( ! prev [ curr . channelId ] ) {
156161 prev [ curr . channelId ] = [ ] ;
157162 }