refactor: introduce a script runner for local development

This commit is contained in:
2025-11-28 16:29:16 +08:00
parent 6ff1d96017
commit 00e1f82d85
4 changed files with 207 additions and 114 deletions

View File

@@ -1,133 +1,142 @@
// NOTE(review): this span is a web-rendered git diff with the OLD and NEW
// versions of the file interleaved and the +/- markers stripped. Duplicated
// statements below are diff artifacts, not real code; the span is not valid
// TypeScript as-is. The surviving (new) version appears to be a Cloudflare
// Worker (`export default { async fetch() { ... } }`) that reads CLI-style
// flags from `env.args` — recover the clean file from version control.
import { env } from "cloudflare:workers";
import { fetchTitleFromAnilist } from "~/libs/anilist/getTitle";
import { promiseQueue } from "~/libs/promiseQueue";
import { queueTask } from "~/libs/tasks/queueTask";
import { getDb } from "~/models/db";
import { watchStatusTable } from "~/models/schema";
// Old (removed) line: the script previously read flags straight from process.argv.
const args = new Set(process.argv.slice(2));
export default {
async fetch() {
// New version: flags are injected via an `args` binding on the worker env.
const args = new Set(env.args);
const isDevMode = args.has("--dev");
const shouldTriggerLatestEpisode = args.has("--trigger-latest-episode");
// Diff artifact: old copy of the same two flag declarations.
const isDevMode = args.has("--dev");
const shouldTriggerLatestEpisode = args.has("--trigger-latest-episode");
if (isDevMode) {
console.log("Running in dev mode");
}
// Diff artifact: old copy of the dev-mode log.
if (isDevMode) {
console.log("Running in dev mode");
}
// Fan out: trigger the next-episode route for each distinct watched title,
// serialized through promiseQueue.
await getTitleIds().then((titles) =>
promiseQueue(
titles.map(
(title) => () =>
triggerNextEpisodeRoute(title).then((success) =>
console.log(
`Triggered next episode route for title ${title}: ${success}`,
),
// Diff artifact: the other side's copy of the same promiseQueue pipeline;
// the closing parens of both copies are interleaved below.
await getTitleIds().then((titles) =>
promiseQueue(
titles.map(
(title) => () =>
triggerNextEpisodeRoute(title).then((success) =>
console.log(
`Triggered next episode route for title ${title}: ${success}`,
),
),
),
),
),
);
),
);
// Old copy of getTitleIds: the DB handle still took process.env explicitly.
function getTitleIds() {
return getDb(process.env)
.selectDistinct({ titleId: watchStatusTable.titleId })
.from(watchStatusTable)
.all()
.then((titles) => titles.map((title) => title.titleId));
}
// New version: the fetch handler's response once all titles are processed.
return new Response(JSON.stringify(true));
async function triggerNextEpisodeRoute(titleId: number) {
let title;
try {
title = await fetchTitleFromAnilist(titleId);
} catch (error) {
console.error(`Failed to fetch title ${titleId}`, error);
return false;
}
if (!title) {
console.error(`Failed to fetch title ${titleId}`);
return false;
}
// New copy of getTitleIds: getDb() now resolves its environment internally.
function getTitleIds() {
return getDb()
.selectDistinct({ titleId: watchStatusTable.titleId })
.from(watchStatusTable)
.all()
.then((titles) => titles.map((title) => title.titleId));
}
// In dev (or when explicitly requested), replay the most recent episode
// through the local/production server's new-episode endpoint.
if (isDevMode || shouldTriggerLatestEpisode) {
const serverUrl = isDevMode
? "http://127.0.0.1:8080"
: "https://aniplay-v2.rururu.workers.dev";
const wasSuccessful = await fetch(`${serverUrl}/episodes/${titleId}`)
.then((res) => res.json())
.then(({ success, result }) => {
if (!success) {
console.error(`Failed to fetch episodes for title ${titleId}`);
return -1;
}
// Diff artifact: old copy of triggerNextEpisodeRoute's preamble interleaved
// into the middle of the new copy's fetch chain.
async function triggerNextEpisodeRoute(titleId: number) {
let title;
try {
title = await fetchTitleFromAnilist(titleId);
} catch (error) {
console.error(`Failed to fetch title ${titleId}`, error);
return false;
}
if (!title) {
console.error(`Failed to fetch title ${titleId}`);
return false;
}
// -1 is the sentinel for "episode list fetch failed" (see check below).
return Math.max(...result.episodes.map((episode) => episode.number));
})
.then((mostRecentEpisodeNumber) => {
if (mostRecentEpisodeNumber === -1) {
// Diff artifact: the other side's copy of the same dev/latest-episode branch.
if (isDevMode || shouldTriggerLatestEpisode) {
const serverUrl = isDevMode
? "http://127.0.0.1:8080"
: "https://aniplay-v2.rururu.workers.dev";
const wasSuccessful = await fetch(`${serverUrl}/episodes/${titleId}`)
.then((res) => res.json())
.then(({ success, result }) => {
if (!success) {
console.error(`Failed to fetch episodes for title ${titleId}`);
return -1;
}
return Math.max(
...result.episodes.map((episode) => episode.number),
);
})
.then((mostRecentEpisodeNumber) => {
if (mostRecentEpisodeNumber === -1) {
return false;
}
// POST the most recent episode to the server's internal new-episode route.
return fetch(`${serverUrl}/internal/new-episode`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
aniListId: titleId,
episodeNumber: mostRecentEpisodeNumber,
}),
})
.then(
(res) =>
res.json() as Promise<{ success: boolean; message?: string }>,
)
.then((response) => {
const { success } = response;
if (!success) {
console.error(
`Failed to trigger next episode route for title ${titleId} (most recent episode: ${mostRecentEpisodeNumber})`,
response,
);
}
return success;
});
});
if (!wasSuccessful) {
return false;
}
// Diff artifact: second interleaved copy of the same POST chain (this copy
// also logs on success).
return fetch(`${serverUrl}/internal/new-episode`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
aniListId: titleId,
episodeNumber: mostRecentEpisodeNumber,
}),
})
.then(
(res) =>
res.json() as Promise<{ success: boolean; message?: string }>,
)
.then((response) => {
const { success } = response;
if (!success) {
console.error(
`Failed to trigger next episode route for title ${titleId} (most recent episode: ${mostRecentEpisodeNumber})`,
response,
);
}
console.log(
`Triggered next episode route for title ${titleId} (most recent episode)`,
);
return success;
});
});
// Dev mode stops after the replay; no real task is queued.
if (isDevMode) {
return true;
}
}
if (!wasSuccessful) {
return false;
if (!title.nextAiringEpisode) {
console.log(`Title ${titleId} has no next airing episode`);
return true;
}
// New version: queue the scheduled task under the "NEW_EPISODE" task name,
// scheduled for the episode's airing time.
return queueTask(
"NEW_EPISODE",
{
aniListId: titleId,
episodeNumber: title.nextAiringEpisode.episode,
},
{
scheduleConfig: { epochTime: title.nextAiringEpisode.airingAt },
},
)
.then(() => true)
.catch((error) => {
console.error(
`Failed to trigger next episode route for title ${titleId}`,
error,
);
return false;
});
}
console.log(
`Triggered next episode route for title ${titleId} (most recent episode)`,
);
if (isDevMode) {
return true;
}
}
if (!title.nextAiringEpisode) {
console.log(`Title ${titleId} has no next airing episode`);
return true;
}
// Old (removed) version: task name was "new-episode" and queueTask still
// received process.env explicitly.
return queueTask(
"new-episode",
{
aniListId: titleId,
episodeNumber: title.nextAiringEpisode.episode,
},
{
scheduleConfig: { epochTime: title.nextAiringEpisode.airingAt },
env: process.env,
},
)
.then(() => true)
.catch((error) => {
console.error(
`Failed to trigger next episode route for title ${titleId}`,
error,
);
return false;
});
}
},
};

29
src/scripts/ipCheck.ts Normal file
View File

@@ -0,0 +1,29 @@
// Ad-hoc connectivity check: sends one request to httpbin.org/ip through an
// HTTPS proxy and prints the response, to verify the proxy tunnel works and
// report the egress IP. Run as a standalone ES-module script (top-level await).
// import { GraphQLClient } from "graphql-request";
import { HttpsProxyAgent } from "https-proxy-agent";
import nodeFetch from "node-fetch";
// import { GetTitleQuery } from "../libs/anilist/getTitle.ts";
// SECURITY NOTE(review): proxy username/password are committed in plain text
// here — rotate this credential and load it from the environment instead.
const agent = new HttpsProxyAgent(
"http://ruru:pdh!CQB@kpc3vyb3cwc@45.56.108.251:3128",
);
// node-fetch is used (rather than global fetch) because it accepts an `agent`.
const response = await nodeFetch("https://httpbin.org/ip", { agent });
console.log(await response.text());
console.log(response.status);
// Debug print of the fetch implementation itself — presumably left over from
// troubleshooting; harmless but noisy.
console.log(nodeFetch);
// Commented-out experiment: the same proxy wired into a GraphQL client for an
// AniList query — kept for reference, not executed.
// const client = new GraphQLClient("https://graphql.anilist.co/", {
// fetch: (input, init) => {
// console.log("custom fetch");
// const agent = new HttpsProxyAgent(
// "http://ruru:pdh!CQB@kpc3vyb3cwc@45.56.108.251:3128",
// );
// return nodeFetch(input, { ...init, agent });
// },
// });
// console.log(
// await client
// .request(GetTitleQuery, { id: 186794 })
// .then((data) => data?.Media ?? undefined),
// );

View File

@@ -1,4 +1,10 @@
// NOTE(review): rendered git diff with +/- markers stripped; the old and new
// versions are shown together. New version: a Cloudflare Worker whose fetch
// handler returns a Google auth token built from admin-SDK credentials read
// off the worker `env` binding.
import { env } from "cloudflare:workers";
import { getAdminSdkCredentials } from "~/libs/gcloud/getAdminSdkCredentials";
import { getGoogleAuthToken } from "~/libs/gcloud/getGoogleAuthToken";
// Old (removed) line: the script previously printed the token directly, with
// credentials sourced from process.env.
console.log(await getGoogleAuthToken(getAdminSdkCredentials(process.env)));
export default {
async fetch() {
return new Response(await getGoogleAuthToken(getAdminSdkCredentials(env)));
},
};

View File

@@ -0,0 +1,49 @@
// Local script runner: bundles a worker-style script with esbuild, boots it
// inside Miniflare with a migrated D1 database, dispatches a single request to
// its fetch handler, and prints the response body.
//
// Usage: <runner> <path/to/script.ts> [...flags forwarded to the script]
import { readD1Migrations } from "@cloudflare/vitest-pool-workers/config";
import dotenv from "dotenv";
import * as esbuild from "esbuild";
import { readFile } from "fs/promises";
import { Miniflare } from "miniflare";
import * as path from "node:path";
import * as process from "node:process";
const script = process.argv[2];
// Fail fast with a usable message instead of letting esbuild choke on an
// `undefined` entry point (or silently emitting `dist/undefined.js`).
if (!script) {
  console.error("Usage: <runner> <path/to/script.ts> [...args]");
  process.exit(1);
}
// Bundle name = script basename without its extension.
const fileName = script.split("/").at(-1)?.split(".").at(0);
const outputFilePath = `./dist/${fileName}.js`;
// Remaining argv entries are meant to be forwarded to the script via the
// (currently disabled) `args` binding below.
const args = process.argv.slice(3);
// Bundle the target to a single ESM file; `cloudflare:workers` stays external
// because Miniflare provides that module at runtime.
await esbuild.build({
  entryPoints: [script],
  bundle: true,
  outfile: outputFilePath,
  platform: "node",
  packages: "bundle",
  external: ["cloudflare:workers"],
  format: "esm",
});
const mf = new Miniflare({
  scriptPath: outputFilePath,
  modules: true,
  compatibilityFlags: ["nodejs_compat"],
  compatibilityDate: "2025-11-14",
  // bindings: { ...dotenv.parse(await readFile(".dev.vars")), args },
  // envPath: '.dev.vars',
  d1Databases: {
    DB: {
      // NOTE(review): hard-coded D1 database id — presumably the dev database;
      // consider reading it from wrangler config instead of inlining it here.
      id: "5083d01d-7444-4336-a629-7c3e2002b13d",
    },
  },
});
// Apply every Drizzle migration so the local D1 matches the real schema
// before the script runs.
const d1Database = await mf.getD1Database("DB");
const migrations = await readD1Migrations(path.join(process.cwd(), "drizzle"));
await d1Database.batch(
  migrations.flatMap(({ queries }) =>
    queries.map((query) => d1Database.prepare(query)),
  ),
);
// Invoke the bundled worker's fetch handler once and surface its output.
const res = await mf.dispatchFetch("http://localhost:8787");
console.log(await res.text());
await mf.dispose();