···11+BSD 3-Clause License
22+33+Copyright (c) 2025, Mary
44+55+Redistribution and use in source and binary forms, with or without
66+modification, are permitted provided that the following conditions are met:
77+88+1. Redistributions of source code must retain the above copyright notice, this
99+ list of conditions and the following disclaimer.
1010+1111+2. Redistributions in binary form must reproduce the above copyright notice,
1212+ this list of conditions and the following disclaimer in the documentation
1313+ and/or other materials provided with the distribution.
1414+1515+3. Neither the name of the copyright holder nor the names of its
1616+ contributors may be used to endorse or promote products derived from
1717+ this software without specific prior written permission.
1818+1919+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
2020+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
2121+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
2222+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
2323+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
2424+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
2525+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
2626+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
2727+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
2828+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+35
README.md
# batch-fetch

utility for batching individual queries into a single request.
44+55+```ts
66+type UserId = ReturnType<typeof crypto.randomUUID>;
77+interface User {
88+ id: UserId;
99+ name: string;
1010+}
1111+1212+const fetchUser = createBatchedFetch<UserId, User>({
1313+ limit: 50,
1414+ async fetch(userIds, signal) {
1515+ const url = new URL(`/api/users`, location.origin);
1616+ for (const id of userIds) {
1717+ url.searchParams.append('ids', id);
1818+ }
1919+2020+ const response = await fetch(url, { signal });
2121+2222+ return await response.json() as unknown as User[];
2323+ },
2424+ idFromResource: (user) => user.id,
2525+});
2626+2727+// these individual queries will be batched into one
2828+{
2929+ const p1 = fetchUser('c83e7271-c865-4eae-8c6a-d6f21acb17c1');
3030+ const p2 = fetchUser('306e38a2-bea6-4bf7-80ea-21822aa24c40');
3131+3232+ const [user1, user2] = await Promise.all([p1, p2]);
3333+ // ...
3434+}
3535+```
/** a value of type `T`, either provided synchronously or wrapped in a promise */
type Promisable<T> = T | Promise<T>;

/** identifies a resource */
export type ResourceId = string | number;
/** internal state of a single in-flight batch of queries */
type BatchedFetchMap<Id extends ResourceId, Resource, Query = Id> = {
  /** grouping key (result of the user-supplied `key` option, if any) */
  key: string | number | undefined;
  /** timer for batch execution; reset every time a query joins the batch */
  timeout: ReturnType<typeof setTimeout> | undefined;
  /** controls the lifecycle of this batch */
  controller: AbortController;
  /** a map of pending queries, keyed by resource identifier */
  pending: Map<
    Id,
    {
      /** original query descriptor (the first one seen for this id) */
      query: Query;
      /** promise that will resolve with the resource */
      deferred: PromiseWithResolvers<Resource>;
      /** whether we have a consumer without an abort signal */
      passive: boolean;
      /** amount of consumers with an abort signal */
      signals: number;
    }
  >;
};
/** options shared by every flavor of `BatchedFetchOptions` */
type BaseOptions<Id extends ResourceId, Resource, Query = Id> = {
  /**
   * maximum number of queries that can be included in one request
   */
  limit: number;

  /**
   * how long to wait (in milliseconds) for new queries to be collected
   * before a request
   * @default 125
   */
  timeout?: number;

  /**
   * performs the request
   * @param queries queries passed for this batch
   * @param signal abort signal
   * @returns array of resources
   */
  fetch: (queries: Query[], signal: AbortSignal) => Promisable<Resource[]>;

  /**
   * optional function for separating queries into different batches
   * @param query query descriptor
   * @returns batch grouping key
   */
  key?: (query: Query) => string | number;

  /**
   * function that takes in the resource's identifier, used to associate
   * resources with the queries
   * @param resource resource
   * @returns resource identifier
   */
  idFromResource: (resource: Resource) => Id;
};
/**
 * options for batch fetching.
 *
 * `idFromQuery` is optional when the query descriptor already is the
 * resource identifier (`Query extends Id`), and required otherwise.
 */
export type BatchedFetchOptions<Id extends ResourceId, Resource, Query = Id> =
  & BaseOptions<Id, Resource, Query>
  & (Query extends Id ? {
    /**
     * function that takes in the resource identifier from the query descriptor,
     * used for deduplication and resource matching.
     * @param query query descriptor
     * @returns resource identifier
     */
    idFromQuery?: (query: Query) => Id;
  }
  : {
    /**
     * function that takes in the resource identifier from the query descriptor,
     * used for deduplication and resource matching.
     * @param query query descriptor
     * @returns resource identifier
     */
    idFromQuery: (query: Query) => Id;
  });
8686+8787+/** error thrown when a resource wasn't returned in the response */
8888+export class ResourceMissingError extends Error {
8989+ override readonly name = 'ResourceMissingError';
9090+}
9191+9292+const identity = <T>(value: T): T => value;
9393+9494+/**
9595+ * creates a function that batches individual queries into one single request.
9696+ * @param options configurations
9797+ * @returns a function that you can use to request for a query.
9898+ */
9999+/*#__NO_SIDE_EFFECTS__*/
100100+export const createBatchedFetch = <Id extends ResourceId, Resource, Query = Id>(
101101+ options: BatchedFetchOptions<Id, Resource, Query>,
102102+): (query: Query, signal?: AbortSignal) => Promise<Resource> => {
103103+ const {
104104+ limit,
105105+ timeout = 125,
106106+ fetch,
107107+ key: _key,
108108+ idFromQuery = identity,
109109+ idFromResource,
110110+ } = options;
111111+112112+ /** current active batch */
113113+ let curr: BatchedFetchMap<Id, Resource, Query> | undefined;
114114+115115+ return (query: Query, signal?: AbortSignal): Promise<Resource> => {
116116+ // throw early if provided signal is already aborted
117117+ signal?.throwIfAborted();
118118+119119+ const id = idFromQuery(query);
120120+ const key = _key?.(query);
121121+122122+ // create a new batch if:
123123+ // - we don't have a batch currently waiting
124124+ // - the current batch has already reached the limit
125125+ // - batch key doesn't match
126126+ let batch = curr;
127127+ if (batch === undefined || batch.pending.size >= limit || batch.key !== key) {
128128+ batch = curr = {
129129+ key,
130130+ timeout: undefined,
131131+ controller: new AbortController(),
132132+ pending: new Map(),
133133+ };
134134+ }
135135+136136+ let meta = batch.pending.get(id);
137137+ if (meta === undefined) {
138138+ meta = {
139139+ query: query,
140140+ deferred: Promise.withResolvers(),
141141+ passive: false,
142142+ signals: 0,
143143+ };
144144+145145+ batch.pending.set(id, meta);
146146+ }
147147+148148+ let promise = meta.deferred.promise;
149149+150150+ if (signal === undefined) {
151151+ // this consumer provided no signal, so we can't consider this query for
152152+ // removal if a different consumer has aborted theirs.
153153+ meta.passive = true;
154154+ } else {
155155+ // we need the returned promise to resolve early if the signal is aborted.
156156+ // so we'll race it with this deferred that will only throw.
157157+ const def = Promise.withResolvers<never>();
158158+ promise = Promise.race([promise, def.promise]);
159159+160160+ // make this signal count
161161+ meta.signals++;
162162+163163+ signal.addEventListener(
164164+ 'abort',
165165+ () => {
166166+ // immediately reject this consumer's promise
167167+ def.reject(signal.reason);
168168+169169+ // decrement the count
170170+ meta.signals--;
171171+172172+ // return early, have the query remain in batch if:
173173+ // - we have passive consumers waiting on this query
174174+ // - there are still other consumers with an abort signal waiting
175175+ if (meta.passive || meta.signals > 0) {
176176+ return;
177177+ }
178178+179179+ // no more consumers care about this query, remove from batch
180180+ batch.pending.delete(id);
181181+182182+ // return early, have the batch continue execution if we still need
183183+ // to process other queries.
184184+ if (batch.pending.size > 0) {
185185+ return;
186186+ }
187187+188188+ // batch is empty, clean up completely
189189+ batch.controller.abort();
190190+ clearTimeout(batch.timeout);
191191+192192+ if (curr === batch) {
193193+ curr = undefined;
194194+ }
195195+ },
196196+ {
197197+ once: true,
198198+ signal: batch.controller.signal,
199199+ },
200200+ );
201201+ }
202202+203203+ {
204204+ // reset the execution timer
205205+ clearTimeout(batch.timeout);
206206+207207+ batch.timeout = setTimeout(() => {
208208+ if (curr === batch) {
209209+ curr = undefined;
210210+ }
211211+212212+ perform(batch, fetch, idFromResource);
213213+ }, timeout);
214214+ }
215215+216216+ return promise;
217217+ };
218218+};
219219+220220+const perform = async <Id extends ResourceId, Resource, Query = Id>(
221221+ map: BatchedFetchMap<Id, Resource, Query>,
222222+ fetch: (queries: Query[], signal: AbortSignal) => Promisable<Resource[]>,
223223+ idFromResource: (data: Resource) => Id,
224224+) => {
225225+ const signal = map.controller.signal;
226226+ if (signal.aborted) {
227227+ return;
228228+ }
229229+230230+ const pending = map.pending;
231231+ if (pending.size === 0) {
232232+ // theoretically this should only be empty if the whole-batch signal is
233233+ // aborted, but better be safe.
234234+ return;
235235+ }
236236+237237+ let errored = false;
238238+239239+ try {
240240+ const queries = Array.from(pending.values(), (meta) => meta.query);
241241+ const dataset = await fetch(queries, signal);
242242+243243+ for (const data of dataset) {
244244+ const id = idFromResource(data);
245245+ const meta = pending.get(id);
246246+247247+ meta?.deferred.resolve(data);
248248+ }
249249+ } catch (error) {
250250+ errored = true;
251251+252252+ for (const meta of pending.values()) {
253253+ meta.deferred.reject(error);
254254+ }
255255+ } finally {
256256+ if (!errored) {
257257+ // we've succeeded! we're iterating the pending map again to boot
258258+ // unresolved promises, else they'll end up waiting forever.
259259+ //
260260+ // this should only apply for scenarios where the caller/API handles
261261+ // nonexistent data by omitting it entirely from the results.
262262+ for (const meta of pending.values()) {
263263+ meta.deferred.reject(new ResourceMissingError());
264264+ }
265265+ }
266266+ }
267267+268268+ // abort the controller to clean up event listeners to upstream signals
269269+ map.controller.abort();
270270+};