Compare commits


26 Commits

SHA1 Message Date
0b237d542b feat: Add a new scheduled task to check upcoming titles and refactor its controller for cron execution 2025-12-19 00:16:33 -05:00
c01e005afb refactor: update title controller tests to use async assertion matchers and refine userId type in anilist service. 2025-12-19 00:07:21 -05:00
e5d9d62be2 docs: update README
add folder info about middleware
2025-12-18 23:52:44 -05:00
8d63d4fa5e feat: use luxon for TTL 2025-12-18 23:52:44 -05:00
07bece1f6c chore: add debug logging to help understand why episode updates won't run 2025-12-18 23:52:43 -05:00
2ed38e92bc chore: remove no longer needed isRetrying boolean 2025-12-18 23:52:43 -05:00
26ca15d4aa fix: 'upcoming' titles failing to fetch
wasn't returning "Page" from the anilist json response
2025-12-18 23:52:43 -05:00
4c96f58cb0 feat: add user profile fetch in middleware 2025-12-18 08:48:22 -05:00
b64bd4fc26 refactor: move existing middleware into its own folder 2025-12-18 08:44:28 -05:00
4c2d0a9177 fix: revert back to using typed documents for GraphQL 2025-12-18 08:43:49 -05:00
dc60a1e045 feat: Increase maximum direct queue delay from 9 to 12 hours and cap retry delays at this new limit. 2025-12-18 07:04:31 -05:00
6570c25617 feat: configure queue retry delays with min/max bounds and update exponential backoff defaults 2025-12-17 09:25:07 -05:00
6f795bdde0 feat: implement generic queue message processing with retry logic 2025-12-17 07:55:59 -05:00
243c279ca9 feat: introduce exponential backoff utility 2025-12-17 07:54:24 -05:00
286824e3a1 refactor: standardize authorization header handling in queueTask 2025-12-17 07:54:03 -05:00
b26d22ad91 feat: conditionally queue AniList updates and use updated payload 2025-12-17 07:52:17 -05:00
3c5685dbdb refactor: remove HonoRequest from updateWatchStatus 2025-12-17 07:52:16 -05:00
c527a6eac5 docs: expand README with detailed setup, tech stack, development, and project structure. 2025-12-17 06:52:58 -05:00
f16ac80b7e chore: remove unnecessary env scripts
since `wrangler types` works as intended now
2025-12-17 06:52:44 -05:00
cd04a75b06 feat: remove Docker-related files and configuration 2025-12-17 06:43:36 -05:00
eb6dc545e2 fix: missing deploy script 2025-12-17 06:41:40 -05:00
a99961df51 fix: missing migrations in wrangler.toml 2025-12-17 06:41:21 -05:00
d5b113c884 test: enhance test environment mocking 2025-12-17 06:36:04 -05:00
6eb42f6a33 feat: update delayed task processing to a shared 9-hour maximum delay, remove invalid KV entries 2025-12-17 06:35:57 -05:00
05df043fbe refactor: use fake Vitest Cloudflare environment for processDelayedTasks test 2025-12-17 06:35:05 -05:00
fb7990b274 test: add 'cloudflare:test' module to export same typings as 'cloudflare:workers' 2025-12-17 06:33:00 -05:00
31 changed files with 633 additions and 545 deletions

View File

@@ -1,16 +0,0 @@
node_modules
Dockerfile*
docker-compose*
.dockerignore
.git
.gitignore
README.md
LICENSE
.vscode
Makefile
helm-charts
.env
.dev.vars
.editorconfig
.idea
coverage*

View File

@@ -1,41 +0,0 @@
# use the official Bun image
# see all versions at https://hub.docker.com/r/oven/bun/tags
FROM oven/bun:1 as base
WORKDIR /usr/app
# install dependencies into temp directory
# this will cache them and speed up future builds
FROM base AS install
RUN mkdir -p /tmp/dev
COPY package.json bun.lockb /tmp/dev/
RUN cd /tmp/dev && bun install --frozen-lockfile
# install with --production (exclude devDependencies)
RUN mkdir -p /tmp/prod
COPY package.json bun.lockb /tmp/prod/
RUN cd /tmp/prod && bun install --frozen-lockfile --production
# copy node_modules from temp directory
# then copy all (non-ignored) project files into the image
FROM base AS prerelease
COPY --from=install /tmp/dev/node_modules node_modules
COPY . .
# [optional] tests & build
ENV NODE_ENV=production
RUN bun test
RUN bun build --compile src/index.ts --outfile=aniplay
# copy production dependencies and source code into final image
FROM base AS release
COPY --from=install /tmp/prod/node_modules node_modules
COPY --from=prerelease /usr/app/src ./src
COPY --from=prerelease /usr/app/package.json .
COPY --from=prerelease /usr/app/tsconfig.json .
# TODO: uncomment once v2 is ready
# COPY --from=prerelease /usr/app/drizzle.config.ts .
# run the app
USER bun
EXPOSE 3000
ENTRYPOINT [ "bun", "run", "prod:server" ]

View File

@@ -1,12 +1,72 @@
```
npm install
npm run dev
```
# Aniplay API
```
npm run deploy
```
API for [Aniplay](https://github.com/silverAndroid/aniplay), built with Cloudflare Workers, Hono, and Drizzle ORM.
## Tech Stack
- **Cloudflare Workers**: Serverless execution environment.
- **Hono**: Ultrafast web framework with OpenAPI support.
- **GraphQL**: Used internally for communicating with the [AniList](https://anilist.co) API.
- **Drizzle ORM**: TypeScript ORM for D1 (Cloudflare's serverless SQL database).
- **Vitest**: Testing framework.
## Prerequisites
- **Node.js**
- **pnpm**: Package manager.
## Getting Started
1. **Installation**
```bash
pnpm install
```
2. **Environment Setup**
Generate the environment types:
```bash
pnpm exec wrangler types
```
3. **Database Setup**
Apply migrations to the local D1 database:
```bash
pnpm exec wrangler d1 migrations apply aniplay
```
## Development
If a route is internal-only or doesn't need to appear in the OpenAPI spec (which is autogenerated by Hono), use the `Hono` class. Otherwise, use the `OpenAPIHono` class from `@hono/zod-openapi`.
### Running Locally
Start the development server:
```bash
pnpm run dev
```
### Testing
Run the tests using Vitest:
```bash
pnpm test
```
## Deployment
Deploy to Cloudflare Workers:
```bash
pnpm run deploy
```
## Project Structure
- `src/controllers`: API route handlers (titles, episodes, search, etc.)
- `src/libs`: Shared utilities and logic (AniList integration, background tasks)
- `src/middleware`: Middleware handlers (authentication, authorization, etc.)
- `src/models`: Database schema and models
- `src/scripts`: Utility scripts for maintenance and setup
- `src/types`: TypeScript type definitions
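
The Development note above distinguishes plain `Hono` routes (kept off the generated spec) from `OpenAPIHono` routes (documented via `createRoute`). A minimal sketch of that convention; the paths and response schema here are illustrative, not taken from this repository:

```ts
import { Hono } from "hono";
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";

// Internal-only route: plain Hono, so it never appears in the OpenAPI spec.
const internal = new Hono();
internal.post("/recheck", (c) => c.json({ success: true }));

// Public route: OpenAPIHono + createRoute, so Hono can autogenerate the spec entry.
const ping = createRoute({
  method: "get",
  path: "/ping",
  responses: {
    200: {
      description: "Liveness check",
      content: { "application/json": { schema: z.object({ ok: z.boolean() }) } },
    },
  },
});

const app = new OpenAPIHono();
app.openapi(ping, (c) => c.json({ ok: true }, 200));
app.route("/internal", internal); // internal routes mounted without spec entries
```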

View File

@@ -6,8 +6,7 @@
"type": "module",
"scripts": {
"dev": "wrangler dev src/index.ts --port 8080",
"env:generate": "tsx src/scripts/generateEnv.ts",
"env:verify": "tsx src/scripts/verifyEnv.ts",
"deploy": "wrangler deploy --minify src/index.ts",
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
"test": "vitest",
@@ -32,6 +31,7 @@
},
"devDependencies": {
"@cloudflare/vitest-pool-workers": "^0.10.15",
"@graphql-typed-document-node/core": "^3.2.0",
"@trivago/prettier-plugin-sort-imports": "^4.3.0",
"@types/lodash.mapkeys": "^4.6.9",
"@types/luxon": "^3.6.2",

pnpm-lock.yaml (generated)
View File

@@ -47,6 +47,9 @@ importers:
"@cloudflare/vitest-pool-workers":
specifier: ^0.10.15
version: 0.10.15(@vitest/runner@3.2.4)(@vitest/snapshot@3.2.4)(vitest@3.2.4)
"@graphql-typed-document-node/core":
specifier: ^3.2.0
version: 3.2.0(graphql@16.12.0)
"@trivago/prettier-plugin-sort-imports":
specifier: ^4.3.0
version: 4.3.0(prettier@3.7.4)

View File

@@ -84,7 +84,7 @@ app.openapi(route, async (c) => {
isComplete,
);
if (isComplete) {
await updateWatchStatus(c.req, deviceId, aniListId, "COMPLETED");
await updateWatchStatus(deviceId, aniListId, "COMPLETED");
}
if (!user) {

View File

@@ -15,7 +15,7 @@ type AiringSchedule = {
id: number;
};
export async function getUpcomingTitlesFromAnilist(req: HonoRequest) {
export async function getUpcomingTitlesFromAnilist() {
const durableObjectId = env.ANILIST_DO.idFromName("GLOBAL");
const stub = env.ANILIST_DO.get(durableObjectId);

View File

@@ -9,8 +9,8 @@ import { getUpcomingTitlesFromAnilist } from "./anilist";
const app = new Hono();
app.post("/", async (c) => {
const titles = await getUpcomingTitlesFromAnilist(c.req);
export async function checkUpcomingTitles() {
const titles = await getUpcomingTitlesFromAnilist();
await Promise.allSettled(
titles.map(async (title) => {
@@ -44,6 +44,10 @@ app.post("/", async (c) => {
});
}),
);
}
app.post("/", async (c) => {
await checkUpcomingTitles();
return c.json(SuccessResponse, 200);
});

View File

@@ -30,7 +30,7 @@ export async function fetchPopularTitlesFromAnilist(
);
break;
case "upcoming":
data = await stub.nextSeasonPopular(next.season, next.year, limit);
data = await stub.nextSeasonPopular(next.season, next.year, page, limit);
break;
default:
throw new Error(`Unknown category: ${category}`);

View File

@@ -51,7 +51,7 @@ describe('requests the "/title" route', () => {
headers: new Headers({ "x-anilist-token": "asd" }),
});
expect(await response.json()).toMatchSnapshot();
await expect(response.json()).resolves.toMatchSnapshot();
expect(response.status).toBe(200);
});
@@ -63,7 +63,7 @@ describe('requests the "/title" route', () => {
const response = await app.request("/title?id=10");
expect(await response.json()).toMatchSnapshot();
await expect(response.json()).resolves.toMatchSnapshot();
expect(response.status).toBe(200);
});
@@ -75,7 +75,7 @@ describe('requests the "/title" route', () => {
const response = await app.request("/title?id=-1");
expect(await response.json()).toEqual({ success: false });
await expect(response.json()).resolves.toEqual({ success: false });
expect(response.status).toBe(404);
});
});

View File

@@ -2,6 +2,7 @@ import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
import { fetchTitleFromAnilist } from "~/libs/anilist/getTitle";
import { fetchFromMultipleSources } from "~/libs/fetchFromMultipleSources";
import { userProfileMiddleware } from "~/middleware/userProfile";
import {
AniListIdQuerySchema,
ErrorResponse,
@@ -9,6 +10,7 @@ import {
SuccessResponseSchema,
} from "~/types/schema";
import { Title } from "~/types/title";
import type { User } from "~/types/user";
const app = new OpenAPIHono();
@@ -40,6 +42,7 @@ const route = createRoute({
description: "Title could not be found",
},
},
middleware: [userProfileMiddleware],
});
app.openapi(route, async (c) => {
@@ -55,7 +58,12 @@ app.openapi(route, async (c) => {
}
const { result: title, errorOccurred } = await fetchFromMultipleSources([
() => fetchTitleFromAnilist(aniListId, aniListToken ?? undefined),
() =>
fetchTitleFromAnilist(
aniListId,
(c.get("user") as User)?.id,
aniListToken ?? undefined,
),
]);
if (errorOccurred) {

View File

@@ -1,5 +1,4 @@
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
import type { HonoRequest } from "hono";
import { AnilistUpdateType } from "~/libs/anilist/updateType.ts";
import { maybeScheduleNextAiringEpisode } from "~/libs/maybeScheduleNextAiringEpisode";
@@ -22,7 +21,6 @@ const UpdateWatchStatusRequest = z.object({
deviceId: z.string(),
watchStatus: WatchStatus.nullable(),
titleId: AniListIdSchema,
isRetrying: z.boolean().optional().default(false),
});
const route = createRoute({
@@ -64,7 +62,6 @@ const route = createRoute({
});
export async function updateWatchStatus(
req: HonoRequest,
deviceId: string,
titleId: number,
watchStatus: WatchStatus | null,
@@ -82,14 +79,8 @@ export async function updateWatchStatus(
}
app.openapi(route, async (c) => {
const {
deviceId,
watchStatus,
titleId,
isRetrying = false,
} = await c.req.json<typeof UpdateWatchStatusRequest._type>();
const aniListToken = c.req.header("X-AniList-Token");
const { deviceId, watchStatus, titleId } =
await c.req.json<typeof UpdateWatchStatusRequest._type>();
// Check if we should use mock data
const { useMockData } = await import("~/libs/useMockData");
if (useMockData()) {
@@ -97,26 +88,29 @@ app.openapi(route, async (c) => {
return c.json(SuccessResponse, { status: 200 });
}
if (!isRetrying) {
try {
await updateWatchStatus(c.req, deviceId, titleId, watchStatus);
} catch (error) {
console.error("Error setting watch status");
console.error(error);
return c.json(ErrorResponse, { status: 500 });
}
try {
await updateWatchStatus(deviceId, titleId, watchStatus);
} catch (error) {
console.error("Error setting watch status");
console.error(error);
return c.json(ErrorResponse, { status: 500 });
}
await queueTask(
"ANILIST_UPDATES",
{
deviceId,
watchStatus,
titleId,
updateType: AnilistUpdateType.UpdateWatchStatus,
},
{ req: c.req, scheduleConfig: { delay: { minute: 1 } } },
);
const aniListToken = c.req.header("X-AniList-Token");
if (aniListToken) {
await queueTask(
"ANILIST_UPDATES",
{
[AnilistUpdateType.UpdateWatchStatus]: {
aniListToken,
titleId,
watchStatus,
},
updateType: AnilistUpdateType.UpdateWatchStatus,
},
{ req: c.req, scheduleConfig: { delay: { minute: 1 } } },
);
}
return c.json(SuccessResponse, { status: 200 });
});

View File

@@ -1,12 +1,18 @@
import { swaggerUI } from "@hono/swagger-ui";
import { OpenAPIHono } from "@hono/zod-openapi";
import { Duration, type DurationLike } from "luxon";
import { maybeUpdateLastConnectedAt } from "~/controllers/maybeUpdateLastConnectedAt";
import { onNewEpisode } from "~/controllers/internal/new-episode";
import { AnilistUpdateType } from "~/libs/anilist/updateType";
import { calculateExponentialBackoff } from "~/libs/calculateExponentialBackoff";
import type { QueueName } from "~/libs/tasks/queueName.ts";
import {
MAX_QUEUE_DELAY_SECONDS,
type QueueBody,
} from "~/libs/tasks/queueTask";
import { maybeUpdateLastConnectedAt } from "~/middleware/maybeUpdateLastConnectedAt";
import { onNewEpisode } from "./controllers/internal/new-episode";
import { AnilistUpdateType } from "./libs/anilist/updateType";
import type { QueueBody } from "./libs/tasks/queueTask";
import { checkUpcomingTitles } from "./controllers/internal/upcoming-titles";
export const app = new OpenAPIHono<{ Bindings: Env }>();
@@ -73,50 +79,101 @@ app.get("/docs", swaggerUI({ url: "/openapi.json" }));
export default {
fetch: app.fetch,
async queue(batch) {
switch (batch.queue as QueueName) {
case "ANILIST_UPDATES":
for (const message of (
batch as MessageBatch<QueueBody["ANILIST_UPDATES"]>
).messages) {
switch (message.body.updateType) {
onMessageQueue(batch, async (message, queueName) => {
switch (queueName) {
case "ANILIST_UPDATES":
const anilistUpdateBody =
message.body as QueueBody["ANILIST_UPDATES"];
console.log("queue run", message.body);
switch (anilistUpdateBody.updateType) {
case AnilistUpdateType.UpdateWatchStatus:
if (!message.body[AnilistUpdateType.UpdateWatchStatus]) {
throw new Error(
if (!anilistUpdateBody[AnilistUpdateType.UpdateWatchStatus]) {
console.error(
`Discarding update, unknown body ${JSON.stringify(message.body)}`,
);
return;
}
const { updateWatchStatusOnAnilist } =
await import("~/controllers/watch-status/anilist");
const payload = message.body[AnilistUpdateType.UpdateWatchStatus];
const payload =
anilistUpdateBody[AnilistUpdateType.UpdateWatchStatus];
await updateWatchStatusOnAnilist(
payload.titleId,
payload.watchStatus,
payload.aniListToken,
);
break;
default:
throw new Error(
`Unhandled update type: ${anilistUpdateBody.updateType}`,
);
}
message.ack();
}
break;
case "NEW_EPISODE":
for (const message of (batch as MessageBatch<QueueBody["NEW_EPISODE"]>)
.messages) {
break;
case "NEW_EPISODE":
const newEpisodeBody = message.body as QueueBody["NEW_EPISODE"];
await onNewEpisode(
message.body.aniListId,
message.body.episodeNumber,
newEpisodeBody.aniListId,
newEpisodeBody.episodeNumber,
);
message.ack();
}
break;
}
break;
default:
throw new Error(`Unhandled queue name: ${queueName}`);
}
});
},
async scheduled(event, env, ctx) {
const { processDelayedTasks } =
await import("~/libs/tasks/processDelayedTasks");
await processDelayedTasks(env, ctx);
switch (event.cron) {
case "0 */12 * * *":
const { processDelayedTasks } =
await import("~/libs/tasks/processDelayedTasks");
await processDelayedTasks(env);
break;
case "0 18 * * *":
const { checkUpcomingTitles } =
await import("~/controllers/internal/upcoming-titles");
await checkUpcomingTitles();
break;
default:
throw new Error(`Unhandled cron: ${event.cron}`);
}
},
} satisfies ExportedHandler<Env>;
const retryDelayConfig: Partial<
Record<QueueName, { min: DurationLike; max: DurationLike }>
> = {
NEW_EPISODE: {
min: Duration.fromObject({ hours: 1 }),
max: Duration.fromObject({ hours: 12 }),
},
};
function onMessageQueue<QN extends QueueName>(
messageBatch: MessageBatch<unknown>,
callback: (message: Message<QueueBody[QN]>, queueName: QN) => void,
) {
for (const message of messageBatch.messages) {
try {
callback(message as Message<QueueBody[QN]>, messageBatch.queue as QN);
message.ack();
} catch (error) {
console.error(
`Failed to process message ${message.id} for queue ${messageBatch.queue} with body ${JSON.stringify(message.body)}`,
);
console.error(error);
message.retry({
delaySeconds: Math.min(
calculateExponentialBackoff({
attempt: message.attempts,
baseMin: retryDelayConfig[messageBatch.queue as QN]?.min,
absCap: retryDelayConfig[messageBatch.queue as QN]?.max,
}),
MAX_QUEUE_DELAY_SECONDS,
),
});
}
}
}
export { AnilistDurableObject as AnilistDo } from "~/libs/anilist/anilist-do.ts";

View File

@@ -1,5 +1,7 @@
import type { TypedDocumentNode } from "@graphql-typed-document-node/core";
import { DurableObject } from "cloudflare:workers";
import { print } from "graphql";
import { DateTime } from "luxon";
import { z } from "zod";
import {
@@ -7,6 +9,7 @@ import {
GetNextEpisodeAiringAtQuery,
GetPopularTitlesQuery,
GetTitleQuery,
GetTitleUserDataQuery,
GetTrendingTitlesQuery,
GetUpcomingTitlesQuery,
GetUserProfileQuery,
@@ -17,6 +20,7 @@ import {
SearchQuery,
} from "~/libs/anilist/queries";
import { sleep } from "~/libs/sleep.ts";
import type { Title } from "~/types/title";
const nextAiringEpisodeSchema = z.nullable(
z.object({
@@ -37,30 +41,54 @@ export class AnilistDurableObject extends DurableObject {
return new Response("Not found", { status: 404 });
}
async getTitle(id: number, token?: string) {
return this.handleCachedRequest(
`title:${id}`,
async () => {
const anilistResponse = await this.fetchFromAnilist(
GetTitleQuery,
{ id },
token,
);
return anilistResponse?.Media ?? null;
},
(media) => {
if (!media) return undefined;
// Cast to any to access fragment fields without unmasking
const nextAiringEpisode = nextAiringEpisodeSchema.parse(
(media as any)?.nextAiringEpisode,
);
const airingAt = (nextAiringEpisode?.airingAt ?? 0) * 1000;
if (airingAt) {
return airingAt - Date.now();
}
return undefined;
},
async getTitle(
id: number,
userId?: number,
token?: string,
): Promise<Title | null> {
const promises: Promise<any>[] = [
this.handleCachedRequest(
`title:${id}`,
async () => {
const anilistResponse = await this.fetchFromAnilist(GetTitleQuery, {
id,
});
return anilistResponse?.Media ?? null;
},
(media) => {
if (!media) return undefined;
// Cast to any to access fragment fields without unmasking
const nextAiringEpisode = nextAiringEpisodeSchema.parse(
(media as any)?.nextAiringEpisode,
);
return nextAiringEpisode?.airingAt
? DateTime.fromMillis(nextAiringEpisode?.airingAt)
: undefined;
},
),
];
promises.push(
userId
? this.handleCachedRequest(
`title:${id}:${userId}`,
async () => {
const anilistResponse = await this.fetchFromAnilist(
GetTitleUserDataQuery,
{ id },
{ token },
);
return anilistResponse?.Media ?? null;
},
DateTime.now().plus({ days: 1 }),
)
: Promise.resolve({ mediaListEntry: null }),
);
return Promise.all(promises).then(([title, userTitle]) => ({
...title,
...userTitle,
}));
}
async getNextEpisodeAiringAt(id: number) {
@@ -72,7 +100,7 @@ export class AnilistDurableObject extends DurableObject {
});
return data?.Media;
},
60 * 60 * 1000,
DateTime.now().plus({ hours: 1 }),
);
}
@@ -87,7 +115,7 @@ export class AnilistDurableObject extends DurableObject {
});
return data?.Page;
},
60 * 60 * 1000,
DateTime.now().plus({ hours: 1 }),
);
}
@@ -100,8 +128,7 @@ export class AnilistDurableObject extends DurableObject {
) {
return this.handleCachedRequest(
`popular:${JSON.stringify({ season, seasonYear, nextSeason, nextYear, limit })}`,
async () => {
console.log(nextSeason, nextYear, print(BrowsePopularQuery));
() => {
return this.fetchFromAnilist(BrowsePopularQuery, {
season,
seasonYear,
@@ -110,21 +137,27 @@ export class AnilistDurableObject extends DurableObject {
limit,
});
},
24 * 60 * 60 * 1000,
DateTime.now().plus({ days: 1 }),
);
}
async nextSeasonPopular(nextSeason: any, nextYear: number, limit: number) {
async nextSeasonPopular(
nextSeason: any,
nextYear: number,
page: number,
limit: number,
) {
return this.handleCachedRequest(
`next_season:${JSON.stringify({ nextSeason, nextYear, limit })}`,
`next_season:${JSON.stringify({ nextSeason, nextYear, page, limit })}`,
async () => {
return this.fetchFromAnilist(NextSeasonPopularQuery, {
nextSeason,
nextYear,
limit,
});
page,
}).then((data) => data?.Page);
},
24 * 60 * 60 * 1000,
DateTime.now().plus({ days: 1 }),
);
}
@@ -137,15 +170,14 @@ export class AnilistDurableObject extends DurableObject {
return this.handleCachedRequest(
`popular:${JSON.stringify({ page, limit, season, seasonYear })}`,
async () => {
const data = await this.fetchFromAnilist(GetPopularTitlesQuery, {
return this.fetchFromAnilist(GetPopularTitlesQuery, {
page,
limit,
season,
seasonYear,
});
return data?.Page;
}).then((data) => data?.Page);
},
24 * 60 * 60 * 1000,
DateTime.now().plus({ days: 1 }),
);
}
@@ -159,7 +191,7 @@ export class AnilistDurableObject extends DurableObject {
});
return data?.Page;
},
24 * 60 * 60 * 1000,
DateTime.now().plus({ days: 1 }),
);
}
@@ -178,7 +210,7 @@ export class AnilistDurableObject extends DurableObject {
});
return data?.Page;
},
24 * 60 * 60 * 1000,
DateTime.now().plus({ days: 1 }),
);
}
@@ -186,10 +218,10 @@ export class AnilistDurableObject extends DurableObject {
return this.handleCachedRequest(
`user:${token}`,
async () => {
const data = await this.fetchFromAnilist(GetUserQuery, {}, token);
const data = await this.fetchFromAnilist(GetUserQuery, {}, { token });
return data?.Viewer;
},
60 * 60 * 24 * 30 * 1000,
DateTime.now().plus({ days: 30 }),
);
}
@@ -200,11 +232,11 @@ export class AnilistDurableObject extends DurableObject {
const data = await this.fetchFromAnilist(
GetUserProfileQuery,
{ token },
token,
{ token },
);
return data?.Viewer;
},
60 * 60 * 24 * 30 * 1000,
DateTime.now().plus({ days: 30 }),
);
}
@@ -216,7 +248,7 @@ export class AnilistDurableObject extends DurableObject {
const data = await this.fetchFromAnilist(
MarkEpisodeAsWatchedMutation,
{ titleId, episodeNumber },
token,
{ token },
);
return data?.SaveMediaListEntry;
}
@@ -225,7 +257,7 @@ export class AnilistDurableObject extends DurableObject {
const data = await this.fetchFromAnilist(
MarkTitleAsWatchedMutation,
{ titleId },
token,
{ token },
);
return data?.SaveMediaListEntry;
}
@@ -234,7 +266,7 @@ export class AnilistDurableObject extends DurableObject {
async handleCachedRequest<T>(
key: string,
fetcher: () => Promise<T>,
ttl?: number | ((data: T) => number | undefined),
ttl?: DateTime | ((data: T) => DateTime | undefined),
) {
const cache = await this.state.storage.get(key);
console.debug(`Retrieving request ${key} from cache:`, cache != null);
@@ -246,9 +278,8 @@ export class AnilistDurableObject extends DurableObject {
await this.state.storage.put(key, result);
const calculatedTtl = typeof ttl === "function" ? ttl(result) : ttl;
if (calculatedTtl && calculatedTtl > 0) {
const alarmTime = Date.now() + calculatedTtl;
if (calculatedTtl) {
const alarmTime = calculatedTtl.toMillis();
await this.state.storage.setAlarm(alarmTime);
await this.state.storage.put(`alarm:${key}`, alarmTime);
}
@@ -259,11 +290,13 @@ export class AnilistDurableObject extends DurableObject {
async alarm() {
const now = Date.now();
const alarms = await this.state.storage.list({ prefix: "alarm:" });
console.debug(`Retrieved alarms from cache:`, Object.entries(alarms));
for (const [key, ttl] of Object.entries(alarms)) {
if (now >= ttl) {
// The key in alarms is `alarm:${storageKey}`
// We want to delete the storageKey
const storageKey = key.replace("alarm:", "");
console.debug(`Deleting storage key ${storageKey} & alarm ${key}`);
await this.state.storage.delete(storageKey);
await this.state.storage.delete(key);
}
@@ -271,10 +304,13 @@ export class AnilistDurableObject extends DurableObject {
}
async fetchFromAnilist<Result = any, Variables = any>(
queryString: string,
query: TypedDocumentNode<Result, Variables>,
variables: Variables,
token?: string | undefined,
): Promise<Result> {
{
token,
shouldRetryOnRateLimit = true,
}: { token?: string | undefined; shouldRetryOnRateLimit?: boolean } = {},
): Promise<Result | undefined> {
const headers: any = {
"Content-Type": "application/json",
};
@@ -285,7 +321,7 @@ export class AnilistDurableObject extends DurableObject {
// Use the query passed in, or fallback if needed (though we expect it to be passed)
// We print the query to string
// const queryString = print(query);
const queryString = print(query);
const response = await fetch(`${this.env.PROXY_URL}/proxy`, {
method: "POST",
@@ -304,14 +340,17 @@ export class AnilistDurableObject extends DurableObject {
});
// 1. Handle Rate Limiting (429)
if (response.status === 429) {
if (shouldRetryOnRateLimit && response.status === 429) {
const retryAfter = await response
.json()
.json<{ headers: Record<string, string> }>()
.then(({ headers }) => new Headers(headers).get("Retry-After"));
console.log("429, retrying in", retryAfter);
await sleep(Number(retryAfter || 1) * 1000); // specific fallback or ensure logic
return this.fetchFromAnilist(query, variables, token);
return this.fetchFromAnilist(query, variables, {
token,
shouldRetryOnRateLimit: false,
});
}
// 2. Handle HTTP Errors (like 404 or 500)

View File

@@ -5,6 +5,7 @@ import type { Title } from "~/types/title";
export async function fetchTitleFromAnilist(
id: number,
userId?: number | undefined,
token?: string | undefined,
): Promise<Title | undefined> {
if (useMockData()) {
@@ -17,8 +18,7 @@ export async function fetchTitleFromAnilist(
);
const stub = env.ANILIST_DO.get(durableObjectId);
const data = await stub.getTitle(id, token);
const data = await stub.getTitle(id, userId, token);
if (!data) {
return undefined;
}

View File

@@ -14,6 +14,18 @@ export const GetTitleQuery = graphql(
[MediaFragment],
);
export const GetTitleUserDataQuery = graphql(`
query GetTitleUserData($id: Int!) {
Media(id: $id) {
mediaListEntry {
id
progress
status
}
}
}
`);
export const SearchQuery = graphql(
`
query Search($query: String!, $page: Int!, $limit: Int!) {
@@ -247,8 +259,9 @@ export const NextSeasonPopularQuery = graphql(
$nextSeason: MediaSeason
$nextYear: Int
$limit: Int!
$page: Int!
) {
Page(page: 1, perPage: $limit) {
Page(page: $page, perPage: $limit) {
media(
season: $nextSeason
seasonYear: $nextYear

View File

@@ -0,0 +1,53 @@
import { Duration, type DurationLike } from "luxon";
interface CalculateExponentialBackoffOptions {
attempt: number;
baseMin?: DurationLike;
absCap?: DurationLike;
fuzzFactor?: number;
}
/**
* Generates a backoff time where both the Minimum floor and Maximum ceiling
* are "fuzzed" with jitter to prevent clustering at the edges.
*
* @param attempt - The current retry attempt (0-indexed).
* @param baseMin - The nominal minimum wait time (default: 1s).
* @param absCap - The absolute maximum wait time (default: 60s).
* @param fuzzFactor - How much to wobble the edges (0.1 = +/- 10%).
*
* @returns A random duration between the nominal minimum and maximum, in seconds.
*/
export function calculateExponentialBackoff({
attempt,
baseMin: baseMinDuration = Duration.fromObject({ minutes: 1 }),
absCap: absCapDuration = Duration.fromObject({ hours: 1 }),
fuzzFactor = 0.2,
}: CalculateExponentialBackoffOptions): number {
const baseMin = Duration.fromDurationLike(baseMinDuration).as("seconds");
const absCap = Duration.fromDurationLike(absCapDuration).as("seconds");
// 1. Calculate nominal boundaries
// Example: If baseMin is 1s, the nominal boundaries are 1s, 2s, 4s, 8s... (The 'ceiling' grows exponentially)
const nominalMin = baseMin;
const nominalCeiling = Math.min(baseMin * Math.pow(2, attempt), absCap);
// 2. Fuzz the Min (The Floor)
// Example: If min is 1s and fuzz is 0.2, the floor becomes random between 0.8s and 1.2s
const minFuzz = nominalMin * fuzzFactor;
const fuzzedMin = nominalMin + (Math.random() * 2 * minFuzz - minFuzz);
// 3. Fuzz the Max (The Ceiling)
// Example: If ceiling is 4s (and fuzz is 0.2), it becomes random between 3.2s and 4.8s
const maxFuzz = nominalCeiling * fuzzFactor;
const fuzzedCeiling =
nominalCeiling + (Math.random() * 2 * maxFuzz - maxFuzz);
// Safety: Ensure we don't return a negative number or cross boundaries weirdly
// (e.g. if fuzz makes min > max, we swap or clamp)
const safeMin = Math.max(0, fuzzedMin);
const safeMax = Math.max(safeMin, fuzzedCeiling);
// 4. Return random value in the new fuzzy range
return safeMin + Math.random() * (safeMax - safeMin);
}
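
A quick usage sketch for the utility above, with illustrative values; the range in the final comment follows from the nominal bounds and the default `fuzzFactor` of 0.2:

```ts
import { Duration } from "luxon";
import { calculateExponentialBackoff } from "~/libs/calculateExponentialBackoff";

const delaySeconds = calculateExponentialBackoff({
  attempt: 2, // third delivery attempt (0-indexed)
  baseMin: Duration.fromObject({ minutes: 1 }), // nominal floor: 60s
  absCap: Duration.fromObject({ hours: 1 }), // nominal ceiling: 3600s
});
// Nominal ceiling for attempt 2 is min(60 * 2^2, 3600) = 240s; fuzzing both
// edges by ±20% puts the result roughly between 48s and 288s.
```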

View File

@@ -3,11 +3,13 @@ import mapKeys from "lodash.mapkeys";
import { Case, changeStringCase } from "../changeStringCase";
export function getAdminSdkCredentials(env: Cloudflare.Env = cloudflareEnv) {
export function getAdminSdkCredentials(
env: Cloudflare.Env = cloudflareEnv,
): AdminSdkCredentials {
return mapKeys(
JSON.parse(env.ADMIN_SDK_JSON) as AdminSdkCredentials,
(_, key) => changeStringCase(key, Case.snake_case, Case.camelCase),
);
) satisfies AdminSdkCredentials;
}
export interface AdminSdkCredentials {

View File

@@ -1,204 +1,158 @@
import { env } from "cloudflare:test";
import { DateTime } from "luxon";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { getTestEnv } from "../test/getTestEnv";
import { processDelayedTasks } from "./processDelayedTasks";
describe("processDelayedTasks", () => {
let mockEnv: Cloudflare.Env;
let mockCtx: ExecutionContext;
let kvGetSpy: ReturnType<typeof vi.fn>;
let kvDeleteSpy: ReturnType<typeof vi.fn>;
let kvPutSpy: ReturnType<typeof vi.fn>;
let queueSendSpy: ReturnType<typeof vi.fn>;
beforeEach(() => {
kvGetSpy = vi.fn(() => Promise.resolve(null));
kvDeleteSpy = vi.fn(() => Promise.resolve());
kvPutSpy = vi.fn(() => Promise.resolve());
queueSendSpy = vi.fn(() => Promise.resolve());
mockEnv = {
DELAYED_TASKS: {
get: kvGetSpy,
delete: kvDeleteSpy,
put: kvPutSpy,
list: vi.fn(() =>
Promise.resolve({
keys: [],
list_complete: true as const,
cacheStatus: null,
}),
),
getWithMetadata: vi.fn(() =>
Promise.resolve({ value: null, metadata: null }),
),
} as any,
NEW_EPISODE: {
send: queueSendSpy,
} as any,
ANILIST_UPDATES: {
send: vi.fn(() => Promise.resolve()),
} as any,
} as any;
mockCtx = {
waitUntil: vi.fn(() => {}),
passThroughOnException: vi.fn(() => {}),
} as any;
beforeEach(async () => {
const tasksToDelete = await env.DELAYED_TASKS.list({
prefix: "delayed-task:",
});
console.log(`Found ${tasksToDelete.keys.length} tasks to delete`);
for (const task of tasksToDelete.keys) {
await env.DELAYED_TASKS.delete(task.name);
}
});
it("handles empty KV namespace", async () => {
await processDelayedTasks(mockEnv, mockCtx);
await processDelayedTasks(env);
expect(kvDeleteSpy).not.toHaveBeenCalled();
expect(queueSendSpy).not.toHaveBeenCalled();
await expect(
env.DELAYED_TASKS.list({ prefix: "delayed-task:" }).then(
(result) => result.keys,
),
).resolves.toHaveLength(0);
});
it("queues tasks within 12 hours of scheduled time", async () => {
const now = Math.floor(Date.now() / 1000);
const scheduledTime = now + 6 * 3600; // 6 hours from now
it("queues tasks within 9 hours of scheduled time", async () => {
const now = DateTime.now();
const scheduledTime = now.plus({ hours: 6 }).toSeconds();
const taskMetadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 123, episodeNumber: 1 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: scheduledTime,
taskId: "task-1",
createdAt: now - 18 * 3600,
createdAt: now.minus({ hours: 18 }).toSeconds(),
retryCount: 0,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: `delayed-task:${scheduledTime}:task-1` }],
list_complete: true as const,
cacheStatus: null,
}),
);
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
await processDelayedTasks(mockEnv, mockCtx);
expect(queueSendSpy).toHaveBeenCalledTimes(1);
expect(kvDeleteSpy).toHaveBeenCalledTimes(1);
expect(kvDeleteSpy).toHaveBeenCalledWith(
await env.DELAYED_TASKS.put(
`delayed-task:${scheduledTime}:task-1`,
JSON.stringify(taskMetadata),
);
await processDelayedTasks(env);
await expect(
env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-1`),
).resolves.toBeNull();
});
it("does not queue tasks beyond 12 hours", async () => {
const now = Math.floor(Date.now() / 1000);
const scheduledTime = now + 24 * 3600; // 24 hours from now
it("does not queue tasks beyond 9 hours", async () => {
const now = DateTime.now();
const scheduledTime = now.plus({ hours: 24 }).toSeconds();
const taskMetadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 456, episodeNumber: 2 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: scheduledTime,
taskId: "task-2",
createdAt: now,
createdAt: now.toSeconds(),
retryCount: 0,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: `delayed-task:${scheduledTime}:task-2` }],
list_complete: true as const,
cacheStatus: null,
}),
await env.DELAYED_TASKS.put(
`delayed-task:${scheduledTime}:task-2`,
JSON.stringify(taskMetadata),
);
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
await processDelayedTasks(env);
await processDelayedTasks(mockEnv, mockCtx);
expect(queueSendSpy).not.toHaveBeenCalled();
expect(kvDeleteSpy).not.toHaveBeenCalled();
await expect(
env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-2`),
).resolves.toBeTruthy();
});
it("increments retry count on queue failure", async () => {
const now = Math.floor(Date.now() / 1000);
const scheduledTime = now + 1 * 3600; // 1 hour from now
const now = DateTime.now();
const scheduledTime = now.plus({ hours: 1 }).toSeconds();
const taskMetadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 789, episodeNumber: 3 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: scheduledTime,
taskId: "task-3",
createdAt: now - 23 * 3600,
createdAt: now.minus({ hours: 23 }).toSeconds(),
retryCount: 0,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: `delayed-task:${scheduledTime}:task-3` }],
list_complete: true as const,
cacheStatus: null,
}),
const mockEnv = getTestEnv({
NEW_EPISODE: {
send: vi.fn().mockRejectedValue(new Error("Queue error")),
sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
},
});
await mockEnv.DELAYED_TASKS.put(
`delayed-task:${scheduledTime}:task-3`,
JSON.stringify(taskMetadata),
);
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
queueSendSpy.mockRejectedValue(new Error("Queue error"));
await processDelayedTasks(mockEnv);
await processDelayedTasks(mockEnv, mockCtx);
expect(kvPutSpy).toHaveBeenCalledTimes(1);
const updatedMetadata = JSON.parse(kvPutSpy.mock.calls[0][1]);
const updatedMetadata = JSON.parse(
(await mockEnv.DELAYED_TASKS.get(
`delayed-task:${scheduledTime}:task-3`,
))!,
);
expect(updatedMetadata.retryCount).toBe(1);
expect(kvDeleteSpy).not.toHaveBeenCalled();
});
it("logs alert after 3 failed attempts", async () => {
const consoleErrorSpy = vi.fn(() => {});
const originalConsoleError = console.error;
console.error = consoleErrorSpy as any;
const now = Math.floor(Date.now() / 1000);
const scheduledTime = now + 1 * 3600;
const now = DateTime.now();
const scheduledTime = now.plus({ hours: 1 }).toSeconds();
const taskMetadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 999, episodeNumber: 4 },
body: { aniListId: 789, episodeNumber: 4 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: scheduledTime,
taskId: "task-4",
createdAt: now - 23 * 3600,
retryCount: 2, // Will become 3 after this failure
createdAt: now.minus({ hours: 23 }).toSeconds(),
retryCount: 2,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: `delayed-task:${scheduledTime}:task-4` }],
list_complete: true as const,
cacheStatus: null,
}),
const mockEnv = getTestEnv({
NEW_EPISODE: {
send: vi.fn().mockRejectedValue(new Error("Queue error")),
sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
},
});
await mockEnv.DELAYED_TASKS.put(
`delayed-task:${scheduledTime}:task-4`,
JSON.stringify(taskMetadata),
);
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
queueSendSpy.mockRejectedValue(new Error("Queue error"));
await processDelayedTasks(mockEnv, mockCtx);
await processDelayedTasks(mockEnv);
// Check that alert was logged
const alertCalls = consoleErrorSpy.mock.calls.filter((call: any) =>
call[0]?.includes("🚨 ALERT"),
);
expect(alertCalls.length).toBeGreaterThan(0);
console.error = originalConsoleError;
});
it("handles multiple tasks in single cron run", async () => {
const now = Math.floor(Date.now() / 1000);
const now = DateTime.now();
const task1Metadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 100, episodeNumber: 1 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: now + 2 * 3600,
scheduledEpochTime: now.plus({ hours: 2 }).toSeconds(),
taskId: "task-1",
createdAt: now - 20 * 3600,
createdAt: now.minus({ hours: 20 }).toSeconds(),
retryCount: 0,
};
@@ -206,47 +160,53 @@ describe("processDelayedTasks", () => {
queueName: "NEW_EPISODE",
body: { aniListId: 200, episodeNumber: 2 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: now + 5 * 3600,
scheduledEpochTime: now.plus({ hours: 5 }).toSeconds(),
taskId: "task-2",
createdAt: now - 19 * 3600,
createdAt: now.minus({ hours: 19 }).toSeconds(),
retryCount: 0,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [
{ name: `delayed-task:${task1Metadata.scheduledEpochTime}:task-1` },
{ name: `delayed-task:${task2Metadata.scheduledEpochTime}:task-2` },
],
list_complete: true as const,
cacheStatus: null,
}),
await env.DELAYED_TASKS.put(
`delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
JSON.stringify(task1Metadata),
);
await env.DELAYED_TASKS.put(
`delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
JSON.stringify(task2Metadata),
);
kvGetSpy
.mockReturnValueOnce(Promise.resolve(JSON.stringify(task1Metadata)))
.mockReturnValueOnce(Promise.resolve(JSON.stringify(task2Metadata)));
await processDelayedTasks(env);
await processDelayedTasks(mockEnv, mockCtx);
expect(queueSendSpy).toHaveBeenCalledTimes(2);
expect(kvDeleteSpy).toHaveBeenCalledTimes(2);
await expect(
env.DELAYED_TASKS.get(
`delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
),
).resolves.toBeNull();
await expect(
env.DELAYED_TASKS.get(
`delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
),
).resolves.toBeNull();
});
it("skips tasks with null values in KV", async () => {
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: "delayed-task:123:invalid" }],
list_complete: true as const,
cacheStatus: null,
}),
);
const queueSendSpy = vi.fn().mockResolvedValue(undefined);
const mockEnv = getTestEnv({
NEW_EPISODE: {
send: queueSendSpy,
sendBatch: queueSendSpy,
},
ANILIST_UPDATES: {
send: queueSendSpy,
sendBatch: queueSendSpy,
},
});
await mockEnv.DELAYED_TASKS.put(`delayed-task:123:invalid`, null);
kvGetSpy.mockReturnValue(Promise.resolve(null));
await processDelayedTasks(mockEnv, mockCtx);
await processDelayedTasks(mockEnv);
expect(queueSendSpy).not.toHaveBeenCalled();
expect(kvDeleteSpy).not.toHaveBeenCalled();
await expect(
mockEnv.DELAYED_TASKS.get(`delayed-task:123:invalid`),
).resolves.toBeNull();
});
});

View File

@@ -2,15 +2,11 @@ import { DateTime } from "luxon";
import type { DelayedTaskMetadata } from "./delayedTask";
import { deserializeDelayedTask } from "./delayedTask";
import { queueTask } from "./queueTask";
import { MAX_QUEUE_DELAY_SECONDS, queueTask } from "./queueTask";
const MAX_DELAY_SECONDS = 12 * 60 * 60; // 43,200 seconds (12 hours)
const RETRY_ALERT_THRESHOLD = 3;
export async function processDelayedTasks(
env: Cloudflare.Env,
ctx: ExecutionContext,
): Promise<void> {
export async function processDelayedTasks(env: Cloudflare.Env): Promise<void> {
console.log("Starting delayed task processing cron job");
const kvNamespace = env.DELAYED_TASKS;
@@ -31,7 +27,7 @@ export async function processDelayedTasks(
console.log(`Found ${keys.length} delayed tasks to check`);
const currentTime = Math.floor(Date.now() / 1000);
const twelveHoursFromNow = currentTime + MAX_DELAY_SECONDS;
const maxQueueTime = currentTime + MAX_QUEUE_DELAY_SECONDS;
let processedCount = 0;
let queuedCount = 0;
@@ -40,16 +36,17 @@ export async function processDelayedTasks(
for (const key of keys) {
try {
const value = await kvNamespace.get(key.name);
if (!value) {
console.warn(`Task key ${key.name} has no value, skipping`);
if (!value || value == "null") {
console.warn(`Task key ${key.name} has no value, removing`);
await kvNamespace.delete(key.name);
continue;
}
const metadata: DelayedTaskMetadata = deserializeDelayedTask(value);
processedCount++;
// Check if task is ready to be queued (within 12 hours of scheduled time)
if (metadata.scheduledEpochTime <= twelveHoursFromNow) {
// Check if task is ready to be queued (within 9 hours of scheduled time)
if (metadata.scheduledEpochTime <= maxQueueTime) {
const remainingDelay = Math.max(
0,
metadata.scheduledEpochTime - currentTime,
@@ -100,7 +97,7 @@ export async function processDelayedTasks(
}
} else {
const hoursUntilReady =
(metadata.scheduledEpochTime - twelveHoursFromNow) / 3600;
(metadata.scheduledEpochTime - maxQueueTime) / 3600;
console.log(
`Task ${metadata.taskId} not ready yet (${hoursUntilReady.toFixed(1)} hours until queueable)`,
);
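
For reference, the `delayed-task:*` entries that `processDelayedTasks` scans look like this; the shape is inferred from the test metadata above, with illustrative values:

```ts
// Shape inferred from the tests; timestamps are epoch seconds.
async function storeDelayedTask(env: Cloudflare.Env) {
  const taskId = "task-1"; // illustrative id
  const nowSeconds = Math.floor(Date.now() / 1000);
  const scheduledEpochTime = nowSeconds + 24 * 3600; // fires in 24h, beyond the direct-queue cap
  const metadata = {
    queueName: "NEW_EPISODE",
    body: { aniListId: 123, episodeNumber: 1 },
    headers: { "Content-Type": "application/json" },
    scheduledEpochTime,
    taskId,
    createdAt: nowSeconds,
    retryCount: 0, // incremented on queue failure; alert logged once it reaches 3
  };
  await env.DELAYED_TASKS.put(
    `delayed-task:${scheduledEpochTime}:${taskId}`,
    JSON.stringify(metadata),
  );
}
```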

View File

@@ -81,8 +81,8 @@ describe("queueTask - delayed task handling", () => {
});
});
describe("tasks with delay > 9 hours", () => {
it("stores task in KV when delay exceeds 9 hours", async () => {
describe("tasks with delay > 12 hours", () => {
it("stores task in KV when delay exceeds 12 hours", async () => {
await queueTask(
"NEW_EPISODE",
{ aniListId: 111, episodeNumber: 4 },
@@ -98,12 +98,12 @@ describe("queueTask - delayed task handling", () => {
expect(queueSendSpy).not.toHaveBeenCalled();
});
it("stores task in KV when delay is 9 hours + 1 second", async () => {
it("stores task in KV when delay is 12 hours + 1 second", async () => {
await queueTask(
"NEW_EPISODE",
{ aniListId: 222, episodeNumber: 5 },
{
scheduleConfig: { delay: { hours: 9, seconds: 1 } },
scheduleConfig: { delay: { hours: 12, seconds: 1 } },
env: mockEnv,
},
);
@@ -176,7 +176,7 @@ describe("queueTask - delayed task handling", () => {
});
describe("epoch time scheduling", () => {
it("queues directly when epoch time is within 9 hours", async () => {
it("queues directly when epoch time is within 12 hours", async () => {
const futureTime = Math.floor(Date.now() / 1000) + 3600; // 1 hour from now
await queueTask(
@@ -192,7 +192,7 @@ describe("queueTask - delayed task handling", () => {
expect(kvPutSpy).not.toHaveBeenCalled();
});
it("stores in KV when epoch time is beyond 9 hours", async () => {
it("stores in KV when epoch time is beyond 12 hours", async () => {
const futureTime = Math.floor(Date.now() / 1000) + 24 * 3600; // 24 hours from now
await queueTask(

View File

@@ -30,6 +30,10 @@ interface QueueTaskOptionalArgs {
env?: Cloudflare.Env;
}
export const MAX_QUEUE_DELAY_SECONDS = Duration.fromObject({ hours: 12 }).as(
"seconds",
);
export async function queueTask(
queueName: QueueName,
body: QueueBody[QueueName],
@@ -42,10 +46,8 @@ export async function queueTask(
req?.header(),
);
const MAX_DELAY_SECONDS = Duration.fromObject({ hours: 9 }).as("seconds");
// If delay exceeds 9 hours, store in KV for later processing
if (scheduleTime > MAX_DELAY_SECONDS) {
// If delay exceeds 12 hours, store in KV for later processing
if (scheduleTime > MAX_QUEUE_DELAY_SECONDS) {
if (!env || !env.DELAYED_TASKS) {
throw new Error("DELAYED_TASKS KV namespace not available");
}
@@ -130,6 +132,9 @@ function buildTask(
scheduleTime = Duration.fromDurationLike(delay).as("second");
}
}
const authorizationHeader = headers?.["X-Anilist-Token"]
? { Authorization: `Bearer ${headers["X-Anilist-Token"]}` }
: {};
switch (queueName) {
case "ANILIST_UPDATES":
@@ -138,8 +143,8 @@ function buildTask(
body,
scheduleTime,
headers: {
...authorizationHeader,
"Content-Type": "application/json",
"X-Anilist-Token": headers?.["X-Anilist-Token"],
},
};
default:

View File

@@ -8,10 +8,12 @@ export function getTestEnvVariables(): Cloudflare.Env {
export function getTestEnv({
ADMIN_SDK_JSON = '{"client_email": "test@test.com", "project_id": "test-26g38"}',
LOG_DB_QUERIES = "false",
...mockEnv
}: Partial<Cloudflare.Env> = {}): Cloudflare.Env {
return {
...env,
ADMIN_SDK_JSON,
LOG_DB_QUERIES,
...mockEnv,
};
}

View File

@@ -0,0 +1,25 @@
import { createMiddleware } from "hono/factory";
import type { User } from "~/types/user";
export const userProfileMiddleware = createMiddleware<
Cloudflare.Env & {
Variables: {
user: User;
};
Bindings: Env;
}
>(async (c, next) => {
const aniListToken = await c.req.header("X-AniList-Token");
if (!aniListToken) {
return next();
}
const user = await c.env.ANILIST_DO.getByName("GLOBAL").getUser(aniListToken);
if (!user) {
return c.json({ error: "User not found" }, 401);
}
c.set("user", user);
return next();
});

View File

@@ -1,47 +0,0 @@
import { Project } from "ts-morph";
import { $ } from "zx";
import { logStep } from "~/libs/logStep";
await logStep(
'Re-generating "env.d.ts"',
() => $`wrangler types src/types/env.d.ts`.quiet(),
"Generated env.d.ts",
);
const secretNames = await logStep(
"Fetching secrets from Cloudflare",
async (): Promise<string[]> => {
const { stdout } = await $`wrangler secret list`.quiet();
return JSON.parse(stdout.toString()).map(
(secret: { name: string; type: "secret_text" }) => secret.name,
);
},
"Fetched secrets",
);
const project = new Project({});
const envSourceFile = project.addSourceFileAtPath("src/types/env.d.ts");
envSourceFile.insertImportDeclaration(2, {
isTypeOnly: true,
moduleSpecifier: "hono",
namedImports: ["Env as HonoEnv"],
});
envSourceFile
.getInterfaceOrThrow("Env")
.addExtends(["HonoEnv", "Record<string, unknown>"]);
envSourceFile.getInterfaceOrThrow("Env").addProperties(
secretNames.map((name) => ({
name,
type: `string`,
})),
);
await project.save();
await logStep(
"Formatting env.d.ts",
() => $`prettier --write src/types/env.d.ts`.quiet(),
"Formatted env.d.ts",
);

View File

@@ -1,40 +0,0 @@
import { readFile } from "fs/promises";
import { $, sleep } from "zx";
import { logStep } from "~/libs/logStep";
await $`cp src/types/env.d.ts /tmp/env.d.ts`.quiet();
await logStep(
'Generating "env.d.ts"',
// @ts-ignore
() => import("./generateEnv"),
"Generated env.d.ts",
);
await logStep("Comparing env.d.ts", async () => {
function filterComments(content: Buffer) {
return content
.toString()
.split("\n")
.filter((line) => !line.trim().startsWith("//"))
.join("\n");
}
const currentFileContent = filterComments(await readFile("/tmp/env.d.ts"));
const generatedFileContent = filterComments(
await readFile("src/types/env.d.ts"),
);
if (currentFileContent === generatedFileContent) {
console.log("env.d.ts is up to date");
return;
}
const isCI = process.env["IS_CI"] === "true";
const vcsCommand = isCI ? "git" : "sl";
await $`${vcsCommand} diff src/types/env.d.ts`.stdio("inherit");
// add 1 second to make sure spawn completes
await sleep(1000);
throw new Error("env.d.ts is out of date");
});

View File

@@ -21,11 +21,6 @@ export const MediaFragment = graphql(`
medium
}
countryOfOrigin
mediaListEntry {
id
progress
status
}
nextAiringEpisode {
timeUntilAiring
airingAt

View File

@@ -3,20 +3,24 @@ import { z } from "zod";
export type User = z.infer<typeof User>;
export const User = z
.object({
statistics: z.object({
minutesWatched: z.number().openapi({ type: "integer", format: "int64" }),
episodesWatched: z.number().openapi({ type: "integer", format: "int64" }),
count: z
.number()
.int() /* .openapi({ type: "integer", format: "int64" }) */,
meanScore: z.number().openapi({ type: "number", format: "float" }),
}),
id: z.number().openapi({ type: "integer", format: "int64" }),
name: z.string(),
avatar: z.object({
medium: z.string(),
large: z.string(),
}),
})
.optional()
.nullable();
export type UserProfile = z.infer<typeof UserProfile>;
export const UserProfile = z.object({
statistics: z.object({
minutesWatched: z.number().openapi({ type: "integer", format: "int64" }),
episodesWatched: z.number().openapi({ type: "integer", format: "int64" }),
count: z.number().int(),
meanScore: z.number().openapi({ type: "number", format: "float" }),
}),
id: z.number().openapi({ type: "integer", format: "int64" }),
name: z.string(),
avatar: z.object({
medium: z.string(),
large: z.string(),
}),
});

View File

@@ -2,32 +2,32 @@
// Generated by Wrangler by running `wrangler types` (hash: df24977940a31745cb42d562b6645de2)
// Runtime types generated with workerd@1.20251210.0 2025-11-28 nodejs_compat
declare namespace Cloudflare {
interface GlobalProps {
mainModule: typeof import("./src/index");
durableNamespaces: "AnilistDo";
}
interface Env {
DELAYED_TASKS: KVNamespace;
ADMIN_SDK_JSON: string;
CLOUDFLARE_TOKEN: string;
CLOUDFLARE_D1_TOKEN: string;
CLOUDFLARE_ACCOUNT_ID: string;
CLOUDFLARE_DATABASE_ID: string;
PROXY_URL: string;
USE_MOCK_DATA: string;
LOG_DB_QUERIES: string;
ANILIST_DO: DurableObjectNamespace<import("./src/index").AnilistDo>;
DB: D1Database;
ANILIST_UPDATES: Queue;
NEW_EPISODE: Queue;
}
interface GlobalProps {
mainModule: typeof import("./src/index");
durableNamespaces: "AnilistDo";
}
interface Env {
DELAYED_TASKS: KVNamespace;
ADMIN_SDK_JSON: string;
CLOUDFLARE_TOKEN: string;
CLOUDFLARE_D1_TOKEN: string;
CLOUDFLARE_ACCOUNT_ID: string;
CLOUDFLARE_DATABASE_ID: string;
PROXY_URL: string;
USE_MOCK_DATA: string;
LOG_DB_QUERIES: string;
ANILIST_DO: DurableObjectNamespace<import("./src/index").AnilistDo>;
DB: D1Database;
ANILIST_UPDATES: Queue;
NEW_EPISODE: Queue;
}
}
interface Env extends Cloudflare.Env {}
interface Env extends Cloudflare.Env { }
type StringifyValues<EnvType extends Record<string, unknown>> = {
[Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string;
[Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string;
};
declare namespace NodeJS {
interface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, "ADMIN_SDK_JSON" | "CLOUDFLARE_TOKEN" | "CLOUDFLARE_D1_TOKEN" | "CLOUDFLARE_ACCOUNT_ID" | "CLOUDFLARE_DATABASE_ID" | "PROXY_URL" | "USE_MOCK_DATA" | "LOG_DB_QUERIES">> {}
interface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, "ADMIN_SDK_JSON" | "CLOUDFLARE_TOKEN" | "CLOUDFLARE_D1_TOKEN" | "CLOUDFLARE_ACCOUNT_ID" | "CLOUDFLARE_DATABASE_ID" | "PROXY_URL" | "USE_MOCK_DATA" | "LOG_DB_QUERIES">> { }
}
// Begin runtime types
@@ -1644,7 +1644,7 @@ declare abstract class Body {
*/
declare var Response: {
prototype: Response;
new (body?: BodyInit | null, init?: ResponseInit): Response;
new(body?: BodyInit | null, init?: ResponseInit): Response;
error(): Response;
redirect(url: string, status?: number): Response;
json(any: any, maybeInit?: (ResponseInit | Response)): Response;
@@ -2192,7 +2192,7 @@ interface ReadableStream<R = any> {
*/
declare const ReadableStream: {
prototype: ReadableStream;
new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy<Uint8Array>): ReadableStream<Uint8Array>;
new(underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy<Uint8Array>): ReadableStream<Uint8Array>;
new <R = any>(underlyingSource?: UnderlyingSource<R>, strategy?: QueuingStrategy<R>): ReadableStream<R>;
};
/**
@@ -3034,7 +3034,7 @@ type WebSocketEventMap = {
*/
declare var WebSocket: {
prototype: WebSocket;
new (url: string, protocols?: (string[] | string)): WebSocket;
new(url: string, protocols?: (string[] | string)): WebSocket;
readonly READY_STATE_CONNECTING: number;
readonly CONNECTING: number;
readonly READY_STATE_OPEN: number;
@@ -3091,7 +3091,7 @@ interface WebSocket extends EventTarget<WebSocketEventMap> {
extensions: string | null;
}
declare const WebSocketPair: {
new (): {
new(): {
0: WebSocket;
1: WebSocket;
};
@@ -9414,20 +9414,20 @@ interface IncomingRequestCfPropertiesTLSClientAuthPlaceholder {
}
/** Possible outcomes of TLS verification */
declare type CertVerificationStatus =
/** Authentication succeeded */
"SUCCESS"
/** No certificate was presented */
| "NONE"
/** Failed because the certificate was self-signed */
| "FAILED:self signed certificate"
/** Failed because the certificate failed a trust chain check */
| "FAILED:unable to verify the first certificate"
/** Failed because the certificate not yet valid */
| "FAILED:certificate is not yet valid"
/** Failed because the certificate is expired */
| "FAILED:certificate has expired"
/** Failed for another unspecified reason */
| "FAILED";
/** Authentication succeeded */
"SUCCESS"
/** No certificate was presented */
| "NONE"
/** Failed because the certificate was self-signed */
| "FAILED:self signed certificate"
/** Failed because the certificate failed a trust chain check */
| "FAILED:unable to verify the first certificate"
/** Failed because the certificate not yet valid */
| "FAILED:certificate is not yet valid"
/** Failed because the certificate is expired */
| "FAILED:certificate has expired"
/** Failed for another unspecified reason */
| "FAILED";
/**
* An upstream endpoint's response to a TCP `keepalive` message from Cloudflare.
*/
@@ -9478,14 +9478,14 @@ interface D1ExecResult {
duration: number;
}
type D1SessionConstraint =
// Indicates that the first query should go to the primary, and the rest queries
// using the same D1DatabaseSession will go to any replica that is consistent with
// the bookmark maintained by the session (returned by the first query).
'first-primary'
// Indicates that the first query can go anywhere (primary or replica), and the rest queries
// using the same D1DatabaseSession will go to any replica that is consistent with
// the bookmark maintained by the session (returned by the first query).
| 'first-unconstrained';
// Indicates that the first query should go to the primary, and the rest queries
// using the same D1DatabaseSession will go to any replica that is consistent with
// the bookmark maintained by the session (returned by the first query).
'first-primary'
// Indicates that the first query can go anywhere (primary or replica), and the rest queries
// using the same D1DatabaseSession will go to any replica that is consistent with
// the bookmark maintained by the session (returned by the first query).
| 'first-unconstrained';
type D1SessionBookmark = string;
declare abstract class D1Database {
prepare(query: string): D1PreparedStatement;
@@ -9599,7 +9599,7 @@ declare type EmailExportedHandler<Env = unknown> = (message: ForwardableEmailMes
declare module "cloudflare:email" {
let _EmailMessage: {
prototype: EmailMessage;
new (from: string, to: string, raw: ReadableStream | string): EmailMessage;
new(from: string, to: string, raw: ReadableStream | string): EmailMessage;
};
export { _EmailMessage as EmailMessage };
}
@@ -10059,16 +10059,16 @@ declare namespace Rpc {
// cloneable composite types. This allows types defined with the "interface" keyword to pass the
// serializable check as well. Otherwise, only types defined with the "type" keyword would pass.
type Serializable<T> =
// Structured cloneables
BaseType
// Structured cloneable composites
| Map<T extends Map<infer U, unknown> ? Serializable<U> : never, T extends Map<unknown, infer U> ? Serializable<U> : never> | Set<T extends Set<infer U> ? Serializable<U> : never> | ReadonlyArray<T extends ReadonlyArray<infer U> ? Serializable<U> : never> | {
[K in keyof T]: K extends number | string ? Serializable<T[K]> : never;
}
// Special types
| Stub<Stubable>
// Serialized as stubs, see `Stubify`
| Stubable;
// Structured cloneables
BaseType
// Structured cloneable composites
| Map<T extends Map<infer U, unknown> ? Serializable<U> : never, T extends Map<unknown, infer U> ? Serializable<U> : never> | Set<T extends Set<infer U> ? Serializable<U> : never> | ReadonlyArray<T extends ReadonlyArray<infer U> ? Serializable<U> : never> | {
[K in keyof T]: K extends number | string ? Serializable<T[K]> : never;
}
// Special types
| Stub<Stubable>
// Serialized as stubs, see `Stubify`
| Stubable;
// Base type for all RPC stubs, including common memory management methods.
// `T` is used as a marker type for unwrapping `Stub`s later.
interface StubBase<T extends Stubable> extends Disposable {
@@ -10083,8 +10083,8 @@ declare namespace Rpc {
type Stubify<T> = T extends Stubable ? Stub<T> : T extends Map<infer K, infer V> ? Map<Stubify<K>, Stubify<V>> : T extends Set<infer V> ? Set<Stubify<V>> : T extends Array<infer V> ? Array<Stubify<V>> : T extends ReadonlyArray<infer V> ? ReadonlyArray<Stubify<V>> : T extends BaseType ? T : T extends {
[key: string | number]: any;
} ? {
[K in keyof T]: Stubify<T[K]>;
} : T;
[K in keyof T]: Stubify<T[K]>;
} : T;
// Recursively rewrite all `Stub<T>`s with the corresponding `T`s.
// Note we use `StubBase` instead of `Stub` here to avoid circular dependencies:
// `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`.
@@ -10092,8 +10092,8 @@ declare namespace Rpc {
type Unstubify<T> = T extends StubBase<infer V> ? V : T extends Map<infer K, infer V> ? Map<Unstubify<K>, Unstubify<V>> : T extends Set<infer V> ? Set<Unstubify<V>> : T extends Array<infer V> ? Array<Unstubify<V>> : T extends ReadonlyArray<infer V> ? ReadonlyArray<Unstubify<V>> : T extends BaseType ? T : T extends {
[key: string | number]: unknown;
} ? {
[K in keyof T]: Unstubify<T[K]>;
} : T;
[K in keyof T]: Unstubify<T[K]>;
} : T;
type UnstubifyAll<A extends any[]> = {
[I in keyof A]: Unstubify<A[I]>;
};
@@ -10166,7 +10166,7 @@ declare namespace Cloudflare {
[K in keyof MainModule]: LoopbackForExport<MainModule[K]>
// If the export is listed in `durableNamespaces`, then it is also a
// DurableObjectNamespace.
& (K extends GlobalProp<"durableNamespaces", never> ? MainModule[K] extends new (...args: any[]) => infer DoInstance ? DoInstance extends Rpc.DurableObjectBranded ? DurableObjectNamespace<DoInstance> : DurableObjectNamespace<undefined> : DurableObjectNamespace<undefined> : {});
& (K extends GlobalProp<"durableNamespaces", never> ? MainModule[K] extends new (...args: any[]) => infer DoInstance ? DoInstance extends Rpc.DurableObjectBranded ? DurableObjectNamespace<DoInstance> : DurableObjectNamespace<undefined> : DurableObjectNamespace<undefined> : {});
};
}
declare namespace CloudflareWorkersModule {
@@ -10251,6 +10251,9 @@ declare namespace CloudflareWorkersModule {
export const env: Cloudflare.Env;
export const exports: Cloudflare.Exports;
}
declare module 'cloudflare:test' {
export = CloudflareWorkersModule;
}
declare module 'cloudflare:workers' {
export = CloudflareWorkersModule;
}
@@ -10822,10 +10825,10 @@ interface WorkflowInstanceCreateOptions<PARAMS = unknown> {
}
type InstanceStatus = {
status: 'queued' // means that instance is waiting to be started (see concurrency limits)
| 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running
| 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish
| 'waitingForPause' // instance is finishing the current work to pause
| 'unknown';
| 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running
| 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish
| 'waitingForPause' // instance is finishing the current work to pause
| 'unknown';
error?: {
name: string;
message: string;

View File

@@ -39,6 +39,14 @@ deleted_classes = ["AnilistDo"]
tag = "v4"
new_sqlite_classes = ["AnilistDo"]
[[migrations]]
tag = "v5"
deleted_classes = ["AnilistDo"]
[[migrations]]
tag = "v6"
new_sqlite_classes = ["AnilistDo"]
[[queues.producers]]
queue = "anilist-updates"
binding = "ANILIST_UPDATES"
@@ -59,7 +67,7 @@ id = "c8db249d8ee7462b91f9c374321776e4"
preview_id = "ff38240eb2aa4b1388c705f4974f5aec"
[triggers]
crons = ["0 */12 * * *"]
crons = ["0 */12 * * *", "0 18 * * *"]
[[d1_databases]]
binding = "DB"