refactor: Consolidate caching logic in Anilist DO by using handleCachedRequest for all data fetching methods and allowing dynamic TTL calculation.

This commit is contained in:
2025-12-07 08:20:55 -05:00
parent 67e07331a1
commit 9116a561c3

View File

@@ -1,6 +1,5 @@
import type { TypedDocumentNode } from "@graphql-typed-document-node/core";
import { DurableObject } from "cloudflare:workers";
import { type ResultOf } from "gql.tada";
import { print } from "graphql";
import { z } from "zod";
@@ -40,59 +39,47 @@ export class AnilistDurableObject extends DurableObject {
}
async getTitle(id: number, token?: string) {
const storageKey = id.toString();
const cache = await this.state.storage.get(storageKey);
if (cache) {
return cache;
}
const anilistResponse = await this.fetchFromAnilist(
GetTitleQuery,
{ id },
token,
return this.handleCachedRequest(
`title:${id}`,
async () => {
const anilistResponse = await this.fetchFromAnilist(
GetTitleQuery,
{ id },
token,
);
return anilistResponse?.Media ?? null;
},
(media) => {
if (!media) return undefined;
// Cast to any to access fragment fields without unmasking
const nextAiringEpisode = nextAiringEpisodeSchema.parse(
(media as any)?.nextAiringEpisode,
);
const airingAt = (nextAiringEpisode?.airingAt ?? 0) * 1000;
if (airingAt) {
return airingAt - Date.now();
}
return undefined;
},
);
// Extract next airing episode for alarm
const media = anilistResponse.Media as ResultOf<
typeof GetTitleQuery
>["Media"];
// Cast to any to access fragment fields without unmasking
const nextAiringEpisode = nextAiringEpisodeSchema.parse(
(media as any)?.nextAiringEpisode,
);
const airingAt = (nextAiringEpisode?.airingAt ?? 0) * 1000;
await this.state.storage.put(storageKey, media);
if (airingAt) {
await this.state.storage.setAlarm(airingAt);
await this.state.storage.put(`alarm:${id}`, airingAt);
}
return media;
}
async getNextEpisodeAiringAt(id: number) {
const storageKey = `next_airing:${id}`;
const TTL = 60 * 60 * 1000;
return this.handleCachedRequest(
storageKey,
`next_airing:${id}`,
async () => {
const data = await this.fetchFromAnilist(GetNextEpisodeAiringAtQuery, {
id,
});
return data?.Media;
},
TTL,
60 * 60 * 1000,
);
}
async search(query: string, page: number, limit: number) {
const storageKey = `search:${JSON.stringify({ query, page, limit })}`;
const TTL = 60 * 60 * 1000;
return this.handleCachedRequest(
storageKey,
`search:${JSON.stringify({ query, page, limit })}`,
async () => {
const data = await this.fetchFromAnilist(SearchQuery, {
query,
@@ -101,7 +88,7 @@ export class AnilistDurableObject extends DurableObject {
});
return data?.Page;
},
TTL,
60 * 60 * 1000,
);
}
@@ -112,29 +99,25 @@ export class AnilistDurableObject extends DurableObject {
nextYear: number,
limit: number,
) {
// NOTE: the previous implementation returned the Anilist response directly
// here, and its cached branch below was unreachable — browsePopular
// effectively fetched from Anilist on every call. Caching is now applied
// via handleCachedRequest.
return this.fetchFromAnilist(BrowsePopularQuery, {
season,
seasonYear,
nextSeason,
nextYear,
limit,
});
return this.handleCachedRequest(
`popular:${JSON.stringify({ season, seasonYear, nextSeason, nextYear, limit })}`,
async () => {
console.log(nextSeason, nextYear, print(BrowsePopularQuery));
return this.fetchFromAnilist(BrowsePopularQuery, {
season,
seasonYear,
nextSeason,
nextYear,
limit,
});
},
24 * 60 * 60 * 1000,
);
}
async nextSeasonPopular(nextSeason: any, nextYear: number, limit: number) {
const storageKey = `next_season:${JSON.stringify({ nextSeason, nextYear, limit })}`;
const TTL = 60 * 60 * 1000;
return this.handleCachedRequest(
storageKey,
`next_season:${JSON.stringify({ nextSeason, nextYear, limit })}`,
async () => {
return this.fetchFromAnilist(NextSeasonPopularQuery, {
nextSeason,
@@ -142,7 +125,7 @@ export class AnilistDurableObject extends DurableObject {
limit,
});
},
TTL,
24 * 60 * 60 * 1000,
);
}
@@ -152,13 +135,8 @@ export class AnilistDurableObject extends DurableObject {
season: any,
seasonYear: number,
) {
// NOTE: the previous implementation's cache logic here was unreachable;
// caching is now deliberately enabled via handleCachedRequest, matching
// the other fetch methods.
const storageKey = `popular:${JSON.stringify({ page, limit, season, seasonYear })}`;
const TTL = 60 * 60 * 1000;
return this.handleCachedRequest(
storageKey,
`popular:${JSON.stringify({ page, limit, season, seasonYear })}`,
async () => {
const data = await this.fetchFromAnilist(GetPopularTitlesQuery, {
page,
@@ -168,15 +146,13 @@ export class AnilistDurableObject extends DurableObject {
});
return data?.Page;
},
TTL,
24 * 60 * 60 * 1000,
);
}
async getTrendingTitles(page: number, limit: number) {
const storageKey = `trending:${JSON.stringify({ page, limit })}`;
const TTL = 60 * 60 * 1000;
return this.handleCachedRequest(
storageKey,
`trending:${JSON.stringify({ page, limit })}`,
async () => {
const data = await this.fetchFromAnilist(GetTrendingTitlesQuery, {
page,
@@ -184,7 +160,7 @@ export class AnilistDurableObject extends DurableObject {
});
return data?.Page;
},
TTL,
24 * 60 * 60 * 1000,
);
}
@@ -193,10 +169,8 @@ export class AnilistDurableObject extends DurableObject {
airingAtLowerBound: number,
airingAtUpperBound: number,
) {
const storageKey = `upcoming:${JSON.stringify({ page, airingAtLowerBound, airingAtUpperBound })}`;
const TTL = 60 * 60 * 1000;
return this.handleCachedRequest(
storageKey,
`upcoming:${JSON.stringify({ page, airingAtLowerBound, airingAtUpperBound })}`,
async () => {
const data = await this.fetchFromAnilist(GetUpcomingTitlesQuery, {
page,
@@ -205,31 +179,34 @@ export class AnilistDurableObject extends DurableObject {
});
return data?.Page;
},
TTL,
24 * 60 * 60 * 1000,
);
}
async getUser(token: string) {
const storageKey = `user:${token}`;
// 1 month
const TTL = 60 * 60 * 24 * 30 * 1000;
return this.handleCachedRequest(
storageKey,
`user:${token}`,
async () => {
const data = await this.fetchFromAnilist(GetUserQuery, {}, token);
return data?.Viewer;
},
TTL,
60 * 60 * 24 * 30 * 1000,
);
}
async getUserProfile(token: string) {
const data = await this.fetchFromAnilist(
GetUserProfileQuery,
{ token },
token,
return this.handleCachedRequest(
`user_profile:${token}`,
async () => {
const data = await this.fetchFromAnilist(
GetUserProfileQuery,
{ token },
token,
);
return data?.Viewer;
},
60 * 60 * 24 * 30 * 1000,
);
return data?.Viewer;
}
async markEpisodeAsWatched(
@@ -258,18 +235,21 @@ export class AnilistDurableObject extends DurableObject {
async handleCachedRequest<T>(
key: string,
fetcher: () => Promise<T>,
ttl?: number,
ttl?: number | ((data: T) => number | undefined),
) {
const cache = await this.state.storage.get(key);
console.debug(`Retrieving request ${key} from cache:`, cache != null);
if (cache) {
return cache;
return cache as T;
}
const result = await fetcher();
await this.state.storage.put(key, result);
if (ttl) {
const alarmTime = Date.now() + ttl;
const calculatedTtl = typeof ttl === "function" ? ttl(result) : ttl;
if (calculatedTtl && calculatedTtl > 0) {
const alarmTime = Date.now() + calculatedTtl;
await this.state.storage.setAlarm(alarmTime);
await this.state.storage.put(`alarm:${key}`, alarmTime);
}