Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
186 changes: 186 additions & 0 deletions src/api-utils.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,186 @@
import { afterEach, beforeEach, describe, expect, it } from "bun:test";
import { fetchWithRetry, paginatedFetch } from "./api-utils.ts";

const originalFetch = globalThis.fetch;
const originalSetTimeout = globalThis.setTimeout;

// Collapse every delay to zero so retry/backoff code paths run instantly.
beforeEach(() => {
  // biome-ignore lint/suspicious/noExplicitAny: test-only shim
  globalThis.setTimeout = ((callback: () => void, _ms?: number) => {
    callback();
    return 0;
  }) as any;
});

// Restore the real globals after each test so later suites see an
// unpatched environment.
afterEach(() => {
  globalThis.fetch = originalFetch;
  globalThis.setTimeout = originalSetTimeout;
});

// ─── fetchWithRetry ───────────────────────────────────────────────────────────

describe("fetchWithRetry", () => {
  it("returns a 200 response immediately without retrying", async () => {
    let calls = 0;
    globalThis.fetch = (async () => {
      calls++;
      return new Response("ok", { status: 200 });
    }) as typeof fetch;

    const res = await fetchWithRetry("https://example.com", {});
    expect(res.status).toBe(200);
    expect(calls).toBe(1);
  });

  it("returns a non-retryable error response (404) immediately without retrying", async () => {
    let calls = 0;
    globalThis.fetch = (async () => {
      calls++;
      return new Response("not found", { status: 404 });
    }) as typeof fetch;

    const res = await fetchWithRetry("https://example.com", {}, 3);
    expect(res.status).toBe(404);
    expect(calls).toBe(1);
  });

  it("returns a non-retryable error response (401) immediately without retrying", async () => {
    let calls = 0;
    globalThis.fetch = (async () => {
      calls++;
      return new Response("unauthorized", { status: 401 });
    }) as typeof fetch;

    const res = await fetchWithRetry("https://example.com", {}, 3);
    expect(res.status).toBe(401);
    expect(calls).toBe(1);
  });

  it("retries on 429 and succeeds on the second attempt", async () => {
    let calls = 0;
    globalThis.fetch = (async () => {
      calls++;
      if (calls === 1) return new Response("rate limited", { status: 429 });
      return new Response("ok", { status: 200 });
    }) as typeof fetch;

    const res = await fetchWithRetry("https://example.com", {}, 3);
    expect(res.status).toBe(200);
    expect(calls).toBe(2);
  });

  it("retries on 429 and honors the Retry-After header delay", async () => {
    // Re-stub setTimeout to record the requested delay so this test actually
    // verifies the header is read — not merely that a retry happened.
    const delays: number[] = [];
    // biome-ignore lint/suspicious/noExplicitAny: test-only shim
    globalThis.setTimeout = ((fn: () => void, delay?: number) => {
      delays.push(delay ?? 0);
      fn();
      return 0;
    }) as any;

    let calls = 0;
    globalThis.fetch = (async () => {
      calls++;
      if (calls === 1) {
        return new Response("rate limited", {
          status: 429,
          headers: { "Retry-After": "5" },
        });
      }
      return new Response("ok", { status: 200 });
    }) as typeof fetch;

    const res = await fetchWithRetry("https://example.com", {}, 3);
    expect(res.status).toBe(200);
    expect(calls).toBe(2);
    // Retry-After: 5 → 5 000 ms base delay with ±10 % jitter applied.
    expect(delays).toHaveLength(1);
    expect(delays[0]).toBeGreaterThanOrEqual(4_500);
    expect(delays[0]).toBeLessThanOrEqual(5_500);
  });

  it("retries on 503 and succeeds on the third attempt", async () => {
    let calls = 0;
    globalThis.fetch = (async () => {
      calls++;
      if (calls < 3) return new Response("service unavailable", { status: 503 });
      return new Response("ok", { status: 200 });
    }) as typeof fetch;

    const res = await fetchWithRetry("https://example.com", {}, 3);
    expect(res.status).toBe(200);
    expect(calls).toBe(3);
  });

  it("returns the last 429 response after maxRetries exhausted", async () => {
    let calls = 0;
    globalThis.fetch = (async () => {
      calls++;
      return new Response("still rate limited", { status: 429 });
    }) as typeof fetch;

    const res = await fetchWithRetry("https://example.com", {}, 2);
    expect(res.status).toBe(429);
    // 1 initial + 2 retries = 3 total calls
    expect(calls).toBe(3);
  });

  it("passes the request options to fetch", async () => {
    let capturedOptions: RequestInit | undefined;
    globalThis.fetch = (async (_url: string | URL | Request, opts?: RequestInit) => {
      capturedOptions = opts;
      return new Response("ok", { status: 200 });
    }) as typeof fetch;

    await fetchWithRetry("https://example.com", {
      headers: { Authorization: "Bearer token" },
    });
    expect((capturedOptions?.headers as Record<string, string>)?.["Authorization"]).toBe(
      "Bearer token",
    );
  });
});

// ─── paginatedFetch ───────────────────────────────────────────────────────────

describe("paginatedFetch", () => {
it("returns empty array when first page is empty", async () => {
const result = await paginatedFetch(async () => []);
expect(result).toEqual([]);
});

it("returns all items when they fit in a single page", async () => {
const result = await paginatedFetch(async () => ["a", "b", "c"], 100);
expect(result).toEqual(["a", "b", "c"]);
});

it("fetches multiple pages until the last page has fewer items than pageSize", async () => {
const pages = [
["a", "b"], // full page (pageSize = 2)
["c", "d"], // full page
["e"], // last page (< 2 items)
];
let callCount = 0;
const result = await paginatedFetch(async (page) => pages[page - 1] ?? [], 2);
void callCount;
Comment on lines +153 to +154
Copy link

Copilot AI Feb 22, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This test declares callCount but never increments or asserts on it, and void callCount; just masks the unused-variable warning. Either remove callCount entirely or use it to assert how many pages were fetched, so the test meaningfully verifies multi-page behavior.

Suggested change
const result = await paginatedFetch(async (page) => pages[page - 1] ?? [], 2);
void callCount;
const result = await paginatedFetch(async (page) => {
callCount++;
return pages[page - 1] ?? [];
}, 2);
expect(callCount).toBe(3);

Copilot uses AI. Check for mistakes.
expect(result).toEqual(["a", "b", "c", "d", "e"]);
});

it("passes the correct page number to fetchPage", async () => {
const capturedPages: number[] = [];
await paginatedFetch(async (page) => {
capturedPages.push(page);
if (page < 3) return Array(2).fill("x");
return ["x"]; // last page
}, 2);
expect(capturedPages).toEqual([1, 2, 3]);
});

it("triggers a second fetch when the page is exactly full, stops when the next page is empty", async () => {
const pages: string[][] = [["a", "b"], []]; // full then empty
const capturedPages: number[] = [];
const result = await paginatedFetch(async (page) => {
capturedPages.push(page);
return pages[page - 1] ?? [];
}, 2);
expect(result).toEqual(["a", "b"]);
expect(capturedPages).toEqual([1, 2]);
});

it("propagates errors thrown by fetchPage", async () => {
await expect(
paginatedFetch(async () => {
throw new Error("API error");
}),
).rejects.toThrow("API error");
});
});
76 changes: 76 additions & 0 deletions src/api-utils.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
// ─── API utilities — pagination and retry helpers ─────────────────────────────
//
// Pure-async helpers with no side effects beyond network I/O. These are the
// only place in the codebase that knows about GitHub rate-limit semantics.
Comment on lines +3 to +4
Copy link

Copilot AI Feb 22, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The file header describes these as “Pure-async helpers with no side effects beyond network I/O”, but the implementation also depends on timers and nondeterminism (setTimeout and Math.random() jitter). Please adjust the comment to match reality (or remove jitter) so future readers don’t treat this module as pure/deterministic.

Suggested change
// Pure-async helpers with no side effects beyond network I/O. These are the
// only place in the codebase that knows about GitHub rate-limit semantics.
// Async helpers that encapsulate pagination, retry, and GitHub rate-limit
// semantics for network calls.

Copilot uses AI. Check for mistakes.

const RETRYABLE_STATUSES = new Set([429, 503]);
const BASE_RETRY_DELAY_MS = 1_000;
const MAX_RETRY_DELAY_MS = 60_000;

/**
* Performs a `fetch` with automatic retry on 429 (rate-limited) and 503
* (server unavailable), using exponential backoff with optional `Retry-After`
* header support.
*
* Non-retryable responses (including successful ones) are returned immediately.
* After `maxRetries` exhausted the last response is returned — callers must
* still check `res.ok`.
*/
export async function fetchWithRetry(
url: string,
options: RequestInit,
maxRetries = 3,
): Promise<Response> {
Comment on lines +19 to +23
Copy link

Copilot AI Feb 22, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

src/api-utils.ts introduces a new side-effectful module (uses fetch/setTimeout) even though the repo docs state that I/O should be isolated to api.ts, tui.ts, and github-code-search.ts (see AGENTS.md:94-96). To stay within that boundary, either keep these helpers inside api.ts, or make api-utils.ts purely computational (e.g., export backoff/pagination logic that takes injected fetch/sleep functions from api.ts). If the intent is to expand the allowed I/O surface, the architecture docs should be updated accordingly.

Copilot uses AI. Check for mistakes.
let attempt = 0;
while (true) {
const res = await fetch(url, options);
if (!RETRYABLE_STATUSES.has(res.status) || attempt >= maxRetries) {
return res;
}
const retryAfterHeader = res.headers.get("Retry-After");
let delayMs: number;
if (retryAfterHeader !== null) {
const seconds = parseInt(retryAfterHeader, 10);
delayMs =
Number.isFinite(seconds) && seconds > 0
? seconds * 1_000
: Math.min(BASE_RETRY_DELAY_MS * 2 ** attempt, MAX_RETRY_DELAY_MS);
} else {
delayMs = Math.min(BASE_RETRY_DELAY_MS * 2 ** attempt, MAX_RETRY_DELAY_MS);
}
// Add ±10 % jitter to avoid thundering-herd on concurrent retries
delayMs = delayMs * (0.9 + Math.random() * 0.2);
await new Promise((r) => setTimeout(r, delayMs));
attempt++;
}
}

/**
 * Fetches all pages from a paginated GitHub API endpoint.
 *
 * Calls `fetchPage(pageNumber)` starting at page 1 and stops when the
 * returned array contains fewer items than `pageSize` (last page signal).
 *
 * @param fetchPage Function that fetches a single page and returns its items.
 *                  Should throw on unrecoverable errors.
 * @param pageSize Expected maximum items per page (default 100). Used as the
 *                 stop condition: `items.length < pageSize → last page`.
 * @param delayMs Optional inter-page delay in milliseconds. Useful to stay
 *                polite with rate limits on high-volume endpoints.
 * @throws {RangeError} If `pageSize` is not a positive integer.
 */
export async function paginatedFetch<T>(
  fetchPage: (page: number) => Promise<T[]>,
  pageSize = 100,
  delayMs = 0,
): Promise<T[]> {
  // A non-positive pageSize would make the short-page stop condition
  // unreachable whenever pages keep coming back full — fail fast instead
  // of looping forever.
  if (!Number.isInteger(pageSize) || pageSize < 1) {
    throw new RangeError(`pageSize must be a positive integer, got ${pageSize}`);
  }
  const all: T[] = [];
  let page = 1;
  while (true) {
    const items = await fetchPage(page);
    all.push(...items);
    if (items.length < pageSize) break; // short page ⇒ last page
    page++;
    if (delayMs > 0) await new Promise((r) => setTimeout(r, delayMs));
  }
  return all;
}
Loading