refactor: Consolidate caching logic in Anilist DO by using handleCachedRequest for all data-fetching methods and allowing dynamic TTL calculation.

2025-12-07 08:20:55 -05:00
parent 67e07331a1
commit 9116a561c3


@@ -1,6 +1,5 @@
 import type { TypedDocumentNode } from "@graphql-typed-document-node/core";
 import { DurableObject } from "cloudflare:workers";
-import { type ResultOf } from "gql.tada";
 import { print } from "graphql";
 import { z } from "zod";
@@ -40,59 +39,47 @@ export class AnilistDurableObject extends DurableObject {
   }
 
   async getTitle(id: number, token?: string) {
-    const storageKey = id.toString();
-    const cache = await this.state.storage.get(storageKey);
-    if (cache) {
-      return cache;
-    }
-    const anilistResponse = await this.fetchFromAnilist(
-      GetTitleQuery,
-      { id },
-      token,
-    );
-
-    // Extract next airing episode for alarm
-    const media = anilistResponse.Media as ResultOf<
-      typeof GetTitleQuery
-    >["Media"];
-    // Cast to any to access fragment fields without unmasking
-    const nextAiringEpisode = nextAiringEpisodeSchema.parse(
-      (media as any)?.nextAiringEpisode,
-    );
-    const airingAt = (nextAiringEpisode?.airingAt ?? 0) * 1000;
-    await this.state.storage.put(storageKey, media);
-    if (airingAt) {
-      await this.state.storage.setAlarm(airingAt);
-      await this.state.storage.put(`alarm:${id}`, airingAt);
-    }
-
-    return media;
+    return this.handleCachedRequest(
+      `title:${id}`,
+      async () => {
+        const anilistResponse = await this.fetchFromAnilist(
+          GetTitleQuery,
+          { id },
+          token,
+        );
+        return anilistResponse?.Media ?? null;
+      },
+      (media) => {
+        if (!media) return undefined;
+        // Cast to any to access fragment fields without unmasking
+        const nextAiringEpisode = nextAiringEpisodeSchema.parse(
+          (media as any)?.nextAiringEpisode,
+        );
+        const airingAt = (nextAiringEpisode?.airingAt ?? 0) * 1000;
+        if (airingAt) {
+          return airingAt - Date.now();
+        }
+        return undefined;
+      },
+    );
   }
 
   async getNextEpisodeAiringAt(id: number) {
-    const storageKey = `next_airing:${id}`;
-    const TTL = 60 * 60 * 1000;
     return this.handleCachedRequest(
-      storageKey,
+      `next_airing:${id}`,
       async () => {
         const data = await this.fetchFromAnilist(GetNextEpisodeAiringAtQuery, {
           id,
         });
         return data?.Media;
       },
-      TTL,
+      60 * 60 * 1000,
     );
   }
 
   async search(query: string, page: number, limit: number) {
-    const storageKey = `search:${JSON.stringify({ query, page, limit })}`;
-    const TTL = 60 * 60 * 1000;
     return this.handleCachedRequest(
-      storageKey,
+      `search:${JSON.stringify({ query, page, limit })}`,
       async () => {
         const data = await this.fetchFromAnilist(SearchQuery, {
           query,
@@ -101,7 +88,7 @@ export class AnilistDurableObject extends DurableObject {
         });
         return data?.Page;
       },
-      TTL,
+      60 * 60 * 1000,
     );
   }
@@ -112,15 +99,10 @@ export class AnilistDurableObject extends DurableObject {
     nextYear: number,
     limit: number,
   ) {
-    // No caching for browse popular as it returns a Response object in the original code?
-    // Wait, the original code had caching logic but it was commented out or mixed?
-    // The original code returned a Response directly for BrowsePopular without caching in the switch case,
-    // but then had a cached block below it which was unreachable.
-    // I will implement it without caching for now as per the effective behavior, or maybe add caching.
-    // Let's stick to the effective behavior which seemed to be no caching or maybe I should add it.
-    // The original code:
-    // return new Response(JSON.stringify(await this.fetchFromAnilist(BrowsePopularQuery, variables)), ...);
+    return this.handleCachedRequest(
+      `popular:${JSON.stringify({ season, seasonYear, nextSeason, nextYear, limit })}`,
+      async () => {
+        console.log(nextSeason, nextYear, print(BrowsePopularQuery));
     return this.fetchFromAnilist(BrowsePopularQuery, {
       season,
       seasonYear,
@@ -128,13 +110,14 @@ export class AnilistDurableObject extends DurableObject {
       nextYear,
       limit,
     });
+      },
+      24 * 60 * 60 * 1000,
+    );
   }
 
   async nextSeasonPopular(nextSeason: any, nextYear: number, limit: number) {
-    const storageKey = `next_season:${JSON.stringify({ nextSeason, nextYear, limit })}`;
-    const TTL = 60 * 60 * 1000;
     return this.handleCachedRequest(
-      storageKey,
+      `next_season:${JSON.stringify({ nextSeason, nextYear, limit })}`,
       async () => {
         return this.fetchFromAnilist(NextSeasonPopularQuery, {
           nextSeason,
@@ -142,7 +125,7 @@ export class AnilistDurableObject extends DurableObject {
           limit,
         });
       },
-      TTL,
+      24 * 60 * 60 * 1000,
     );
   }
@@ -152,13 +135,8 @@ export class AnilistDurableObject extends DurableObject {
     season: any,
     seasonYear: number,
   ) {
-    // The original code had unreachable cache logic.
-    // I will implement it with caching if possible, but let's follow the pattern.
-    // Actually, let's enable caching as it seems intended.
-    const storageKey = `popular:${JSON.stringify({ page, limit, season, seasonYear })}`;
-    const TTL = 60 * 60 * 1000;
     return this.handleCachedRequest(
-      storageKey,
+      `popular:${JSON.stringify({ page, limit, season, seasonYear })}`,
       async () => {
         const data = await this.fetchFromAnilist(GetPopularTitlesQuery, {
           page,
@@ -168,15 +146,13 @@ export class AnilistDurableObject extends DurableObject {
         });
         return data?.Page;
       },
-      TTL,
+      24 * 60 * 60 * 1000,
     );
   }
 
   async getTrendingTitles(page: number, limit: number) {
-    const storageKey = `trending:${JSON.stringify({ page, limit })}`;
-    const TTL = 60 * 60 * 1000;
     return this.handleCachedRequest(
-      storageKey,
+      `trending:${JSON.stringify({ page, limit })}`,
       async () => {
         const data = await this.fetchFromAnilist(GetTrendingTitlesQuery, {
           page,
@@ -184,7 +160,7 @@ export class AnilistDurableObject extends DurableObject {
         });
         return data?.Page;
       },
-      TTL,
+      24 * 60 * 60 * 1000,
     );
   }
@@ -193,10 +169,8 @@ export class AnilistDurableObject extends DurableObject {
     airingAtLowerBound: number,
     airingAtUpperBound: number,
   ) {
-    const storageKey = `upcoming:${JSON.stringify({ page, airingAtLowerBound, airingAtUpperBound })}`;
-    const TTL = 60 * 60 * 1000;
     return this.handleCachedRequest(
-      storageKey,
+      `upcoming:${JSON.stringify({ page, airingAtLowerBound, airingAtUpperBound })}`,
       async () => {
         const data = await this.fetchFromAnilist(GetUpcomingTitlesQuery, {
           page,
@@ -205,31 +179,34 @@ export class AnilistDurableObject extends DurableObject {
         });
         return data?.Page;
       },
-      TTL,
+      24 * 60 * 60 * 1000,
     );
   }
 
   async getUser(token: string) {
-    const storageKey = `user:${token}`;
-    // 1 month
-    const TTL = 60 * 60 * 24 * 30 * 1000;
     return this.handleCachedRequest(
-      storageKey,
+      `user:${token}`,
       async () => {
         const data = await this.fetchFromAnilist(GetUserQuery, {}, token);
         return data?.Viewer;
       },
-      TTL,
+      60 * 60 * 24 * 30 * 1000,
     );
   }
 
   async getUserProfile(token: string) {
+    return this.handleCachedRequest(
+      `user_profile:${token}`,
+      async () => {
     const data = await this.fetchFromAnilist(
       GetUserProfileQuery,
       { token },
       token,
     );
     return data?.Viewer;
+      },
+      60 * 60 * 24 * 30 * 1000,
+    );
   }
 
   async markEpisodeAsWatched(
@@ -258,18 +235,21 @@ export class AnilistDurableObject extends DurableObject {
   async handleCachedRequest<T>(
     key: string,
     fetcher: () => Promise<T>,
-    ttl?: number,
+    ttl?: number | ((data: T) => number | undefined),
   ) {
     const cache = await this.state.storage.get(key);
+    console.debug(`Retrieving request ${key} from cache:`, cache != null);
     if (cache) {
-      return cache;
+      return cache as T;
     }
     const result = await fetcher();
     await this.state.storage.put(key, result);
-    if (ttl) {
-      const alarmTime = Date.now() + ttl;
+    const calculatedTtl = typeof ttl === "function" ? ttl(result) : ttl;
+    if (calculatedTtl && calculatedTtl > 0) {
+      const alarmTime = Date.now() + calculatedTtl;
       await this.state.storage.setAlarm(alarmTime);
       await this.state.storage.put(`alarm:${key}`, alarmTime);
     }
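
For reference, a minimal standalone sketch of the caching contract this commit settles on: ttl may be a fixed number of milliseconds or a callback that derives the TTL from the freshly fetched data (getTitle uses the callback form to expire the entry when the next episode airs). The sketch assumes a plain in-memory Map in place of Durable Object storage and alarms; the cached function, Ttl type, and store are illustrative names, not part of this file.

    // Sketch only: ttl is either a fixed duration in ms or a callback
    // that computes the duration from the fetched value.
    type Ttl<T> = number | ((data: T) => number | undefined);

    const store = new Map<string, { value: unknown; expireAt?: number }>();

    async function cached<T>(
      key: string,
      fetcher: () => Promise<T>,
      ttl?: Ttl<T>,
    ): Promise<T> {
      const hit = store.get(key);
      // Serve from cache while the entry has not expired.
      if (hit && (!hit.expireAt || hit.expireAt > Date.now())) {
        return hit.value as T;
      }
      const result = await fetcher();
      // Resolve the TTL after fetching, so it can depend on the result
      // (e.g. "expire when the next episode airs").
      const ms = typeof ttl === "function" ? ttl(result) : ttl;
      store.set(key, {
        value: result,
        expireAt: ms && ms > 0 ? Date.now() + ms : undefined,
      });
      return result;
    }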