refactor: introduce a script runner for local development

This commit is contained in:
2025-11-28 16:29:16 +08:00
parent 6ff1d96017
commit 00e1f82d85
4 changed files with 207 additions and 114 deletions

View File

@@ -1,10 +1,14 @@
import { env } from "cloudflare:workers";
import { fetchTitleFromAnilist } from "~/libs/anilist/getTitle"; import { fetchTitleFromAnilist } from "~/libs/anilist/getTitle";
import { promiseQueue } from "~/libs/promiseQueue"; import { promiseQueue } from "~/libs/promiseQueue";
import { queueTask } from "~/libs/tasks/queueTask"; import { queueTask } from "~/libs/tasks/queueTask";
import { getDb } from "~/models/db"; import { getDb } from "~/models/db";
import { watchStatusTable } from "~/models/schema"; import { watchStatusTable } from "~/models/schema";
const args = new Set(process.argv.slice(2)); export default {
async fetch() {
const args = new Set(env.args);
const isDevMode = args.has("--dev"); const isDevMode = args.has("--dev");
const shouldTriggerLatestEpisode = args.has("--trigger-latest-episode"); const shouldTriggerLatestEpisode = args.has("--trigger-latest-episode");
@@ -26,8 +30,10 @@ await getTitleIds().then((titles) =>
), ),
); );
return new Response(JSON.stringify(true));
function getTitleIds() { function getTitleIds() {
return getDb(process.env) return getDb()
.selectDistinct({ titleId: watchStatusTable.titleId }) .selectDistinct({ titleId: watchStatusTable.titleId })
.from(watchStatusTable) .from(watchStatusTable)
.all() .all()
@@ -59,7 +65,9 @@ async function triggerNextEpisodeRoute(titleId: number) {
return -1; return -1;
} }
return Math.max(...result.episodes.map((episode) => episode.number)); return Math.max(
...result.episodes.map((episode) => episode.number),
);
}) })
.then((mostRecentEpisodeNumber) => { .then((mostRecentEpisodeNumber) => {
if (mostRecentEpisodeNumber === -1) { if (mostRecentEpisodeNumber === -1) {
@@ -112,14 +120,13 @@ async function triggerNextEpisodeRoute(titleId: number) {
} }
return queueTask( return queueTask(
"new-episode", "NEW_EPISODE",
{ {
aniListId: titleId, aniListId: titleId,
episodeNumber: title.nextAiringEpisode.episode, episodeNumber: title.nextAiringEpisode.episode,
}, },
{ {
scheduleConfig: { epochTime: title.nextAiringEpisode.airingAt }, scheduleConfig: { epochTime: title.nextAiringEpisode.airingAt },
env: process.env,
}, },
) )
.then(() => true) .then(() => true)
@@ -131,3 +138,5 @@ async function triggerNextEpisodeRoute(titleId: number) {
return false; return false;
}); });
} }
},
};

29
src/scripts/ipCheck.ts Normal file
View File

@@ -0,0 +1,29 @@
// import { GraphQLClient } from "graphql-request";
import { HttpsProxyAgent } from "https-proxy-agent";
import nodeFetch from "node-fetch";

// import { GetTitleQuery } from "../libs/anilist/getTitle.ts";

// Quick sanity-check script: routes one request through the authenticated
// HTTP proxy and prints the IP that httpbin sees (should be the proxy's,
// not this machine's).
//
// SECURITY(review): the proxy URL previously embedded a username/password
// literal in source (now committed to git history). Read it from the
// environment; the fallback keeps the script runnable as before, but the
// credential should be rotated and purged from history.
const proxyUrl =
  process.env.PROXY_URL ??
  "http://ruru:pdh!CQB@kpc3vyb3cwc@45.56.108.251:3128";

const agent = new HttpsProxyAgent(proxyUrl);

// httpbin.org/ip echoes the caller's origin IP as JSON.
const response = await nodeFetch("https://httpbin.org/ip", { agent });
console.log(await response.text());
console.log(response.status);
console.log(nodeFetch);

// Kept for reference: the same proxy wired into a GraphQL client for AniList.
// const client = new GraphQLClient("https://graphql.anilist.co/", {
//   fetch: (input, init) => {
//     console.log("custom fetch");
//     const agent = new HttpsProxyAgent(proxyUrl);
//     return nodeFetch(input, { ...init, agent });
//   },
// });
// console.log(
//   await client
//     .request(GetTitleQuery, { id: 186794 })
//     .then((data) => data?.Media ?? undefined),
// );

View File

@@ -1,4 +1,10 @@
import { env } from "cloudflare:workers";
import { getAdminSdkCredentials } from "~/libs/gcloud/getAdminSdkCredentials"; import { getAdminSdkCredentials } from "~/libs/gcloud/getAdminSdkCredentials";
import { getGoogleAuthToken } from "~/libs/gcloud/getGoogleAuthToken"; import { getGoogleAuthToken } from "~/libs/gcloud/getGoogleAuthToken";
console.log(await getGoogleAuthToken(getAdminSdkCredentials(process.env))); export default {
async fetch() {
return new Response(await getGoogleAuthToken(getAdminSdkCredentials(env)));
},
};

View File

@@ -0,0 +1,49 @@
import { readD1Migrations } from "@cloudflare/vitest-pool-workers/config";
import dotenv from "dotenv";
import * as esbuild from "esbuild";
import { readFile } from "fs/promises";
import { Miniflare } from "miniflare";
import * as path from "node:path";
import * as process from "node:process";

// Local script runner: bundles a worker-style script with esbuild, boots it
// in Miniflare with a migrated D1 database, dispatches a single fetch to
// trigger its default `fetch()` export, and prints the response body.
//
// Usage: node runScript.js <path/to/script.ts> [flags forwarded to the worker]

const script = process.argv[2];
if (!script) {
  // Fail fast with a usage message instead of letting esbuild crash on an
  // undefined entry point.
  console.error("usage: runScript <script-path> [worker args...]");
  process.exit(1);
}

// Derive the bundle name from the script's basename ("src/foo.ts" -> "foo").
const fileName = script.split("/").at(-1)?.split(".").at(0);
const outputFilePath = `./dist/${fileName}.js`;

// Everything after the script path is exposed to the worker as the `args`
// binding, which the worker reads via `env.args`.
const args = process.argv.slice(3);

// Bundle for the workerd runtime; `cloudflare:workers` is provided by the
// runtime itself and must stay external.
await esbuild.build({
  entryPoints: [script],
  bundle: true,
  outfile: outputFilePath,
  platform: "node",
  packages: "bundle",
  external: ["cloudflare:workers"],
  format: "esm",
});

const mf = new Miniflare({
  scriptPath: outputFilePath,
  modules: true,
  compatibilityFlags: ["nodejs_compat"],
  compatibilityDate: "2025-11-14",
  // FIX(review): this binding was commented out, so `env.args` (and any
  // secrets in .dev.vars) never reached the worker — CLI flags such as
  // `--dev` were silently ignored. `.dev.vars` is optional: missing file
  // falls back to no extra bindings.
  bindings: {
    ...(await readFile(".dev.vars")
      .then((contents) => dotenv.parse(contents))
      .catch(() => ({}))),
    args,
  },
  d1Databases: {
    DB: {
      id: "5083d01d-7444-4336-a629-7c3e2002b13d",
    },
  },
});

// Apply every Drizzle migration so the worker sees a fully-migrated schema.
const d1Database = await mf.getD1Database("DB");
const migrations = await readD1Migrations(path.join(process.cwd(), "drizzle"));
await d1Database.batch(
  migrations.flatMap(({ queries }) =>
    queries.map((query) => d1Database.prepare(query)),
  ),
);

// One fetch drives the script's default-export `fetch()` handler end-to-end.
const res = await mf.dispatchFetch("http://localhost:8787");
console.log(await res.text());
await mf.dispose();