Compare commits


6 Commits

7 changed files with 211 additions and 238 deletions

View File

@@ -6,6 +6,7 @@
"type": "module",
"scripts": {
"dev": "wrangler dev src/index.ts --port 8080",
"deploy": "wrangler deploy --minify src/index.ts",
"env:generate": "tsx src/scripts/generateEnv.ts",
"env:verify": "tsx src/scripts/verifyEnv.ts",
"db:generate": "drizzle-kit generate",

View File

@@ -1,204 +1,158 @@
import { env } from "cloudflare:test";
import { DateTime } from "luxon";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { getTestEnv } from "../test/getTestEnv";
import { processDelayedTasks } from "./processDelayedTasks";
describe("processDelayedTasks", () => {
let mockEnv: Cloudflare.Env;
let mockCtx: ExecutionContext;
let kvGetSpy: ReturnType<typeof vi.fn>;
let kvDeleteSpy: ReturnType<typeof vi.fn>;
let kvPutSpy: ReturnType<typeof vi.fn>;
let queueSendSpy: ReturnType<typeof vi.fn>;
beforeEach(() => {
kvGetSpy = vi.fn(() => Promise.resolve(null));
kvDeleteSpy = vi.fn(() => Promise.resolve());
kvPutSpy = vi.fn(() => Promise.resolve());
queueSendSpy = vi.fn(() => Promise.resolve());
mockEnv = {
DELAYED_TASKS: {
get: kvGetSpy,
delete: kvDeleteSpy,
put: kvPutSpy,
list: vi.fn(() =>
Promise.resolve({
keys: [],
list_complete: true as const,
cacheStatus: null,
}),
),
getWithMetadata: vi.fn(() =>
Promise.resolve({ value: null, metadata: null }),
),
} as any,
NEW_EPISODE: {
send: queueSendSpy,
} as any,
ANILIST_UPDATES: {
send: vi.fn(() => Promise.resolve()),
} as any,
} as any;
mockCtx = {
waitUntil: vi.fn(() => {}),
passThroughOnException: vi.fn(() => {}),
} as any;
beforeEach(async () => {
const tasksToDelete = await env.DELAYED_TASKS.list({
prefix: "delayed-task:",
});
console.log(`Found ${tasksToDelete.keys.length} tasks to delete`);
for (const task of tasksToDelete.keys) {
await env.DELAYED_TASKS.delete(task.name);
}
});
it("handles empty KV namespace", async () => {
await processDelayedTasks(mockEnv, mockCtx);
await processDelayedTasks(env);
expect(kvDeleteSpy).not.toHaveBeenCalled();
expect(queueSendSpy).not.toHaveBeenCalled();
await expect(
env.DELAYED_TASKS.list({ prefix: "delayed-task:" }).then(
(result) => result.keys,
),
).resolves.toHaveLength(0);
});
it("queues tasks within 12 hours of scheduled time", async () => {
const now = Math.floor(Date.now() / 1000);
const scheduledTime = now + 6 * 3600; // 6 hours from now
it("queues tasks within 9 hours of scheduled time", async () => {
const now = DateTime.now();
const scheduledTime = now.plus({ hours: 6 }).toSeconds();
const taskMetadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 123, episodeNumber: 1 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: scheduledTime,
taskId: "task-1",
createdAt: now - 18 * 3600,
createdAt: now.minus({ hours: 18 }).toSeconds(),
retryCount: 0,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: `delayed-task:${scheduledTime}:task-1` }],
list_complete: true as const,
cacheStatus: null,
}),
);
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
await processDelayedTasks(mockEnv, mockCtx);
expect(queueSendSpy).toHaveBeenCalledTimes(1);
expect(kvDeleteSpy).toHaveBeenCalledTimes(1);
expect(kvDeleteSpy).toHaveBeenCalledWith(
await env.DELAYED_TASKS.put(
`delayed-task:${scheduledTime}:task-1`,
JSON.stringify(taskMetadata),
);
await processDelayedTasks(env);
await expect(
env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-1`),
).resolves.toBeNull();
});
it("does not queue tasks beyond 12 hours", async () => {
const now = Math.floor(Date.now() / 1000);
const scheduledTime = now + 24 * 3600; // 24 hours from now
it("does not queue tasks beyond 9 hours", async () => {
const now = DateTime.now();
const scheduledTime = now.plus({ hours: 24 }).toSeconds();
const taskMetadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 456, episodeNumber: 2 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: scheduledTime,
taskId: "task-2",
createdAt: now,
createdAt: now.toSeconds(),
retryCount: 0,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: `delayed-task:${scheduledTime}:task-2` }],
list_complete: true as const,
cacheStatus: null,
}),
await env.DELAYED_TASKS.put(
`delayed-task:${scheduledTime}:task-2`,
JSON.stringify(taskMetadata),
);
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
await processDelayedTasks(env);
await processDelayedTasks(mockEnv, mockCtx);
expect(queueSendSpy).not.toHaveBeenCalled();
expect(kvDeleteSpy).not.toHaveBeenCalled();
await expect(
env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-2`),
).resolves.toBeTruthy();
});
it("increments retry count on queue failure", async () => {
const now = Math.floor(Date.now() / 1000);
const scheduledTime = now + 1 * 3600; // 1 hour from now
const now = DateTime.now();
const scheduledTime = now.plus({ hours: 1 }).toSeconds();
const taskMetadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 789, episodeNumber: 3 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: scheduledTime,
taskId: "task-3",
createdAt: now - 23 * 3600,
createdAt: now.minus({ hours: 23 }).toSeconds(),
retryCount: 0,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: `delayed-task:${scheduledTime}:task-3` }],
list_complete: true as const,
cacheStatus: null,
}),
const mockEnv = getTestEnv({
NEW_EPISODE: {
send: vi.fn().mockRejectedValue(new Error("Queue error")),
sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
},
});
await mockEnv.DELAYED_TASKS.put(
`delayed-task:${scheduledTime}:task-3`,
JSON.stringify(taskMetadata),
);
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
queueSendSpy.mockRejectedValue(new Error("Queue error"));
await processDelayedTasks(mockEnv);
await processDelayedTasks(mockEnv, mockCtx);
expect(kvPutSpy).toHaveBeenCalledTimes(1);
const updatedMetadata = JSON.parse(kvPutSpy.mock.calls[0][1]);
const updatedMetadata = JSON.parse(
(await mockEnv.DELAYED_TASKS.get(
`delayed-task:${scheduledTime}:task-3`,
))!,
);
expect(updatedMetadata.retryCount).toBe(1);
expect(kvDeleteSpy).not.toHaveBeenCalled();
});
it("logs alert after 3 failed attempts", async () => {
const consoleErrorSpy = vi.fn(() => {});
const originalConsoleError = console.error;
console.error = consoleErrorSpy as any;
const now = Math.floor(Date.now() / 1000);
const scheduledTime = now + 1 * 3600;
const now = DateTime.now();
const scheduledTime = now.plus({ hours: 1 }).toSeconds();
const taskMetadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 999, episodeNumber: 4 },
body: { aniListId: 789, episodeNumber: 4 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: scheduledTime,
taskId: "task-4",
createdAt: now - 23 * 3600,
retryCount: 2, // Will become 3 after this failure
createdAt: now.minus({ hours: 23 }).toSeconds(),
retryCount: 2,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: `delayed-task:${scheduledTime}:task-4` }],
list_complete: true as const,
cacheStatus: null,
}),
const mockEnv = getTestEnv({
NEW_EPISODE: {
send: vi.fn().mockRejectedValue(new Error("Queue error")),
sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
},
});
await mockEnv.DELAYED_TASKS.put(
`delayed-task:${scheduledTime}:task-4`,
JSON.stringify(taskMetadata),
);
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
queueSendSpy.mockRejectedValue(new Error("Queue error"));
await processDelayedTasks(mockEnv, mockCtx);
await processDelayedTasks(mockEnv);
// Check that alert was logged
const alertCalls = consoleErrorSpy.mock.calls.filter((call: any) =>
call[0]?.includes("🚨 ALERT"),
);
expect(alertCalls.length).toBeGreaterThan(0);
console.error = originalConsoleError;
});
it("handles multiple tasks in single cron run", async () => {
const now = Math.floor(Date.now() / 1000);
const now = DateTime.now();
const task1Metadata = {
queueName: "NEW_EPISODE",
body: { aniListId: 100, episodeNumber: 1 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: now + 2 * 3600,
scheduledEpochTime: now.plus({ hours: 2 }).toSeconds(),
taskId: "task-1",
createdAt: now - 20 * 3600,
createdAt: now.minus({ hours: 20 }).toSeconds(),
retryCount: 0,
};
@@ -206,47 +160,53 @@ describe("processDelayedTasks", () => {
queueName: "NEW_EPISODE",
body: { aniListId: 200, episodeNumber: 2 },
headers: { "Content-Type": "application/json" },
scheduledEpochTime: now + 5 * 3600,
scheduledEpochTime: now.plus({ hours: 5 }).toSeconds(),
taskId: "task-2",
createdAt: now - 19 * 3600,
createdAt: now.minus({ hours: 19 }).toSeconds(),
retryCount: 0,
};
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [
{ name: `delayed-task:${task1Metadata.scheduledEpochTime}:task-1` },
{ name: `delayed-task:${task2Metadata.scheduledEpochTime}:task-2` },
],
list_complete: true as const,
cacheStatus: null,
}),
await env.DELAYED_TASKS.put(
`delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
JSON.stringify(task1Metadata),
);
await env.DELAYED_TASKS.put(
`delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
JSON.stringify(task2Metadata),
);
kvGetSpy
.mockReturnValueOnce(Promise.resolve(JSON.stringify(task1Metadata)))
.mockReturnValueOnce(Promise.resolve(JSON.stringify(task2Metadata)));
await processDelayedTasks(env);
await processDelayedTasks(mockEnv, mockCtx);
expect(queueSendSpy).toHaveBeenCalledTimes(2);
expect(kvDeleteSpy).toHaveBeenCalledTimes(2);
await expect(
env.DELAYED_TASKS.get(
`delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
),
).resolves.toBeNull();
await expect(
env.DELAYED_TASKS.get(
`delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
),
).resolves.toBeNull();
});
it("skips tasks with null values in KV", async () => {
mockEnv.DELAYED_TASKS.list = vi.fn(() =>
Promise.resolve({
keys: [{ name: "delayed-task:123:invalid" }],
list_complete: true as const,
cacheStatus: null,
}),
);
const queueSendSpy = vi.fn().mockResolvedValue(undefined);
const mockEnv = getTestEnv({
NEW_EPISODE: {
send: queueSendSpy,
sendBatch: queueSendSpy,
},
ANILIST_UPDATES: {
send: queueSendSpy,
sendBatch: queueSendSpy,
},
});
// Store the literal string "null" so the handler's empty-value check is exercised.
await mockEnv.DELAYED_TASKS.put(`delayed-task:123:invalid`, "null");
kvGetSpy.mockReturnValue(Promise.resolve(null));
await processDelayedTasks(mockEnv, mockCtx);
await processDelayedTasks(mockEnv);
expect(queueSendSpy).not.toHaveBeenCalled();
expect(kvDeleteSpy).not.toHaveBeenCalled();
await expect(
mockEnv.DELAYED_TASKS.get(`delayed-task:123:invalid`),
).resolves.toBeNull();
});
});
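
The keys in these tests all follow the `delayed-task:<epochSeconds>:<taskId>` naming scheme. A minimal sketch of what helpers for that scheme could look like (the helper names are illustrative, not part of this changeset):

// Hypothetical helpers for the delayed-task key scheme exercised above.
const KEY_PREFIX = "delayed-task:";

function buildTaskKey(scheduledEpochTime: number, taskId: string): string {
  return `${KEY_PREFIX}${scheduledEpochTime}:${taskId}`;
}

function parseTaskKey(
  key: string,
): { scheduledEpochTime: number; taskId: string } | null {
  if (!key.startsWith(KEY_PREFIX)) return null;
  const [epoch, ...rest] = key.slice(KEY_PREFIX.length).split(":");
  const scheduledEpochTime = Number(epoch);
  if (!Number.isFinite(scheduledEpochTime) || rest.length === 0) return null;
  return { scheduledEpochTime, taskId: rest.join(":") };
}

Listing with the `delayed-task:` prefix then lets callers scan only delayed tasks, as the cleanup beforeEach above does.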

View File

@@ -2,15 +2,11 @@ import { DateTime } from "luxon";
import type { DelayedTaskMetadata } from "./delayedTask";
import { deserializeDelayedTask } from "./delayedTask";
import { queueTask } from "./queueTask";
import { MAX_DELAY_SECONDS, queueTask } from "./queueTask";
const MAX_DELAY_SECONDS = 12 * 60 * 60; // 43,200 seconds (12 hours)
const RETRY_ALERT_THRESHOLD = 3;
export async function processDelayedTasks(
env: Cloudflare.Env,
ctx: ExecutionContext,
): Promise<void> {
export async function processDelayedTasks(env: Cloudflare.Env): Promise<void> {
console.log("Starting delayed task processing cron job");
const kvNamespace = env.DELAYED_TASKS;
@@ -31,7 +27,7 @@ export async function processDelayedTasks(
console.log(`Found ${keys.length} delayed tasks to check`);
const currentTime = Math.floor(Date.now() / 1000);
const twelveHoursFromNow = currentTime + MAX_DELAY_SECONDS;
const maxQueueTime = currentTime + MAX_DELAY_SECONDS;
let processedCount = 0;
let queuedCount = 0;
@@ -40,16 +36,17 @@ export async function processDelayedTasks(
for (const key of keys) {
try {
const value = await kvNamespace.get(key.name);
if (!value) {
console.warn(`Task key ${key.name} has no value, skipping`);
if (!value || value === "null") {
console.warn(`Task key ${key.name} has no value, removing`);
await kvNamespace.delete(key.name);
continue;
}
const metadata: DelayedTaskMetadata = deserializeDelayedTask(value);
processedCount++;
// Check if task is ready to be queued (within 12 hours of scheduled time)
if (metadata.scheduledEpochTime <= twelveHoursFromNow) {
// Check if task is ready to be queued (within 9 hours of scheduled time)
if (metadata.scheduledEpochTime <= maxQueueTime) {
const remainingDelay = Math.max(
0,
metadata.scheduledEpochTime - currentTime,
@@ -100,7 +97,7 @@ export async function processDelayedTasks(
}
} else {
const hoursUntilReady =
(metadata.scheduledEpochTime - twelveHoursFromNow) / 3600;
(metadata.scheduledEpochTime - maxQueueTime) / 3600;
console.log(
`Task ${metadata.taskId} not ready yet (${hoursUntilReady.toFixed(1)} hours until queueable)`,
);
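
Stripped of logging, counters, and error handling, the per-key decision in this cron handler comes down to the following sketch (assuming `metadata` was deserialized from the KV value and `queue` is the resolved target binding):

// Sketch of the queue-or-skip decision made for each delayed-task key.
const currentTime = Math.floor(Date.now() / 1000);
const maxQueueTime = currentTime + MAX_DELAY_SECONDS;

if (metadata.scheduledEpochTime <= maxQueueTime) {
  // Ready: enqueue with whatever delay is still outstanding (never negative).
  const remainingDelay = Math.max(0, metadata.scheduledEpochTime - currentTime);
  await queue.send(metadata.body, { delaySeconds: remainingDelay });
  await kvNamespace.delete(key.name);
} else {
  // Still outside the window; leave it in KV for a later cron run.
  const hoursUntilReady = (metadata.scheduledEpochTime - maxQueueTime) / 3600;
  console.log(
    `Task ${metadata.taskId} not ready yet (${hoursUntilReady.toFixed(1)} hours until queueable)`,
  );
}

Deleting the key only after a successful send is what the retry tests rely on: a failed send leaves the key in place with an incremented retryCount.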

View File

@@ -30,6 +30,10 @@ interface QueueTaskOptionalArgs {
env?: Cloudflare.Env;
}
export const MAX_DELAY_SECONDS = Duration.fromObject({ hours: 9 }).as(
"seconds",
);
export async function queueTask(
queueName: QueueName,
body: QueueBody[QueueName],
@@ -42,8 +46,6 @@ export async function queueTask(
req?.header(),
);
const MAX_DELAY_SECONDS = Duration.fromObject({ hours: 9 }).as("seconds");
// If delay exceeds 9 hours, store in KV for later processing
if (scheduleTime > MAX_DELAY_SECONDS) {
if (!env || !env.DELAYED_TASKS) {
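
The hunk above hoists MAX_DELAY_SECONDS to module scope and exports it so processDelayedTasks can share the same window. Conceptually the branch works as in this sketch (assuming `scheduleTime` is the requested delay in seconds; taskId generation and header capture are elided):

// Sketch of queueTask's two paths, assuming the metadata shape used in the tests.
if (scheduleTime > MAX_DELAY_SECONDS) {
  // Beyond the supported queue delay: park the task in KV for the cron job.
  const now = Math.floor(Date.now() / 1000);
  const scheduledEpochTime = now + scheduleTime;
  await env.DELAYED_TASKS.put(
    `delayed-task:${scheduledEpochTime}:${taskId}`,
    JSON.stringify({
      queueName,
      body,
      headers,
      scheduledEpochTime,
      taskId,
      createdAt: now,
      retryCount: 0,
    }),
  );
} else {
  // Within the window: hand it straight to the queue with a native delay.
  await env[queueName].send(body, { delaySeconds: scheduleTime });
}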

View File

@@ -8,10 +8,12 @@ export function getTestEnvVariables(): Cloudflare.Env {
export function getTestEnv({
ADMIN_SDK_JSON = '{"client_email": "test@test.com", "project_id": "test-26g38"}',
LOG_DB_QUERIES = "false",
...mockEnv
}: Partial<Cloudflare.Env> = {}): Cloudflare.Env {
return {
...env,
ADMIN_SDK_JSON,
LOG_DB_QUERIES,
...mockEnv,
};
}
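
Accepting a rest parameter means individual bindings can now be overridden while everything else comes from the real test env; because `...mockEnv` is spread last, overrides win over the base `env`. Usage as in the spec above:

// Real KV namespace, but a queue whose send/sendBatch always fail,
// mirroring the retry tests in processDelayedTasks.test.ts.
const mockEnv = getTestEnv({
  NEW_EPISODE: {
    send: vi.fn().mockRejectedValue(new Error("Queue error")),
    sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
  },
});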

View File

@@ -2,32 +2,32 @@
// Generated by Wrangler by running `wrangler types` (hash: df24977940a31745cb42d562b6645de2)
// Runtime types generated with workerd@1.20251210.0 2025-11-28 nodejs_compat
declare namespace Cloudflare {
interface GlobalProps {
mainModule: typeof import("./src/index");
durableNamespaces: "AnilistDo";
}
interface Env {
DELAYED_TASKS: KVNamespace;
ADMIN_SDK_JSON: string;
CLOUDFLARE_TOKEN: string;
CLOUDFLARE_D1_TOKEN: string;
CLOUDFLARE_ACCOUNT_ID: string;
CLOUDFLARE_DATABASE_ID: string;
PROXY_URL: string;
USE_MOCK_DATA: string;
LOG_DB_QUERIES: string;
ANILIST_DO: DurableObjectNamespace<import("./src/index").AnilistDo>;
DB: D1Database;
ANILIST_UPDATES: Queue;
NEW_EPISODE: Queue;
}
interface GlobalProps {
mainModule: typeof import("./src/index");
durableNamespaces: "AnilistDo";
}
interface Env {
DELAYED_TASKS: KVNamespace;
ADMIN_SDK_JSON: string;
CLOUDFLARE_TOKEN: string;
CLOUDFLARE_D1_TOKEN: string;
CLOUDFLARE_ACCOUNT_ID: string;
CLOUDFLARE_DATABASE_ID: string;
PROXY_URL: string;
USE_MOCK_DATA: string;
LOG_DB_QUERIES: string;
ANILIST_DO: DurableObjectNamespace<import("./src/index").AnilistDo>;
DB: D1Database;
ANILIST_UPDATES: Queue;
NEW_EPISODE: Queue;
}
}
interface Env extends Cloudflare.Env {}
interface Env extends Cloudflare.Env { }
type StringifyValues<EnvType extends Record<string, unknown>> = {
[Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string;
[Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string;
};
declare namespace NodeJS {
interface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, "ADMIN_SDK_JSON" | "CLOUDFLARE_TOKEN" | "CLOUDFLARE_D1_TOKEN" | "CLOUDFLARE_ACCOUNT_ID" | "CLOUDFLARE_DATABASE_ID" | "PROXY_URL" | "USE_MOCK_DATA" | "LOG_DB_QUERIES">> {}
interface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, "ADMIN_SDK_JSON" | "CLOUDFLARE_TOKEN" | "CLOUDFLARE_D1_TOKEN" | "CLOUDFLARE_ACCOUNT_ID" | "CLOUDFLARE_DATABASE_ID" | "PROXY_URL" | "USE_MOCK_DATA" | "LOG_DB_QUERIES">> { }
}
// Begin runtime types
@@ -1644,7 +1644,7 @@ declare abstract class Body {
*/
declare var Response: {
prototype: Response;
new (body?: BodyInit | null, init?: ResponseInit): Response;
new(body?: BodyInit | null, init?: ResponseInit): Response;
error(): Response;
redirect(url: string, status?: number): Response;
json(any: any, maybeInit?: (ResponseInit | Response)): Response;
@@ -2192,7 +2192,7 @@ interface ReadableStream<R = any> {
*/
declare const ReadableStream: {
prototype: ReadableStream;
new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy<Uint8Array>): ReadableStream<Uint8Array>;
new(underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy<Uint8Array>): ReadableStream<Uint8Array>;
new <R = any>(underlyingSource?: UnderlyingSource<R>, strategy?: QueuingStrategy<R>): ReadableStream<R>;
};
/**
@@ -3034,7 +3034,7 @@ type WebSocketEventMap = {
*/
declare var WebSocket: {
prototype: WebSocket;
new (url: string, protocols?: (string[] | string)): WebSocket;
new(url: string, protocols?: (string[] | string)): WebSocket;
readonly READY_STATE_CONNECTING: number;
readonly CONNECTING: number;
readonly READY_STATE_OPEN: number;
@@ -3091,7 +3091,7 @@ interface WebSocket extends EventTarget<WebSocketEventMap> {
extensions: string | null;
}
declare const WebSocketPair: {
new (): {
new(): {
0: WebSocket;
1: WebSocket;
};
@@ -9413,21 +9413,21 @@ interface IncomingRequestCfPropertiesTLSClientAuthPlaceholder {
certNotAfter: "";
}
/** Possible outcomes of TLS verification */
declare type CertVerificationStatus =
/** Authentication succeeded */
"SUCCESS"
/** No certificate was presented */
| "NONE"
/** Failed because the certificate was self-signed */
| "FAILED:self signed certificate"
/** Failed because the certificate failed a trust chain check */
| "FAILED:unable to verify the first certificate"
/** Failed because the certificate not yet valid */
| "FAILED:certificate is not yet valid"
/** Failed because the certificate is expired */
| "FAILED:certificate has expired"
/** Failed for another unspecified reason */
| "FAILED";
declare type CertVerificationStatus =
/** Authentication succeeded */
"SUCCESS"
/** No certificate was presented */
| "NONE"
/** Failed because the certificate was self-signed */
| "FAILED:self signed certificate"
/** Failed because the certificate failed a trust chain check */
| "FAILED:unable to verify the first certificate"
/** Failed because the certificate not yet valid */
| "FAILED:certificate is not yet valid"
/** Failed because the certificate is expired */
| "FAILED:certificate has expired"
/** Failed for another unspecified reason */
| "FAILED";
/**
* An upstream endpoint's response to a TCP `keepalive` message from Cloudflare.
*/
@@ -9477,15 +9477,15 @@ interface D1ExecResult {
count: number;
duration: number;
}
type D1SessionConstraint =
// Indicates that the first query should go to the primary, and the rest queries
// using the same D1DatabaseSession will go to any replica that is consistent with
// the bookmark maintained by the session (returned by the first query).
'first-primary'
// Indicates that the first query can go anywhere (primary or replica), and the rest queries
// using the same D1DatabaseSession will go to any replica that is consistent with
// the bookmark maintained by the session (returned by the first query).
| 'first-unconstrained';
type D1SessionConstraint =
// Indicates that the first query should go to the primary, and the rest queries
// using the same D1DatabaseSession will go to any replica that is consistent with
// the bookmark maintained by the session (returned by the first query).
'first-primary'
// Indicates that the first query can go anywhere (primary or replica), and the rest queries
// using the same D1DatabaseSession will go to any replica that is consistent with
// the bookmark maintained by the session (returned by the first query).
| 'first-unconstrained';
type D1SessionBookmark = string;
declare abstract class D1Database {
prepare(query: string): D1PreparedStatement;
@@ -9599,7 +9599,7 @@ declare type EmailExportedHandler<Env = unknown> = (message: ForwardableEmailMes
declare module "cloudflare:email" {
let _EmailMessage: {
prototype: EmailMessage;
new (from: string, to: string, raw: ReadableStream | string): EmailMessage;
new(from: string, to: string, raw: ReadableStream | string): EmailMessage;
};
export { _EmailMessage as EmailMessage };
}
@@ -10058,17 +10058,17 @@ declare namespace Rpc {
// The reason for using a generic type here is to build a serializable subset of structured
// cloneable composite types. This allows types defined with the "interface" keyword to pass the
// serializable check as well. Otherwise, only types defined with the "type" keyword would pass.
type Serializable<T> =
// Structured cloneables
BaseType
// Structured cloneable composites
| Map<T extends Map<infer U, unknown> ? Serializable<U> : never, T extends Map<unknown, infer U> ? Serializable<U> : never> | Set<T extends Set<infer U> ? Serializable<U> : never> | ReadonlyArray<T extends ReadonlyArray<infer U> ? Serializable<U> : never> | {
[K in keyof T]: K extends number | string ? Serializable<T[K]> : never;
}
// Special types
| Stub<Stubable>
// Serialized as stubs, see `Stubify`
| Stubable;
type Serializable<T> =
// Structured cloneables
BaseType
// Structured cloneable composites
| Map<T extends Map<infer U, unknown> ? Serializable<U> : never, T extends Map<unknown, infer U> ? Serializable<U> : never> | Set<T extends Set<infer U> ? Serializable<U> : never> | ReadonlyArray<T extends ReadonlyArray<infer U> ? Serializable<U> : never> | {
[K in keyof T]: K extends number | string ? Serializable<T[K]> : never;
}
// Special types
| Stub<Stubable>
// Serialized as stubs, see `Stubify`
| Stubable;
// Base type for all RPC stubs, including common memory management methods.
// `T` is used as a marker type for unwrapping `Stub`s later.
interface StubBase<T extends Stubable> extends Disposable {
@@ -10083,8 +10083,8 @@ declare namespace Rpc {
type Stubify<T> = T extends Stubable ? Stub<T> : T extends Map<infer K, infer V> ? Map<Stubify<K>, Stubify<V>> : T extends Set<infer V> ? Set<Stubify<V>> : T extends Array<infer V> ? Array<Stubify<V>> : T extends ReadonlyArray<infer V> ? ReadonlyArray<Stubify<V>> : T extends BaseType ? T : T extends {
[key: string | number]: any;
} ? {
[K in keyof T]: Stubify<T[K]>;
} : T;
[K in keyof T]: Stubify<T[K]>;
} : T;
// Recursively rewrite all `Stub<T>`s with the corresponding `T`s.
// Note we use `StubBase` instead of `Stub` here to avoid circular dependencies:
// `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`.
@@ -10092,8 +10092,8 @@ declare namespace Rpc {
type Unstubify<T> = T extends StubBase<infer V> ? V : T extends Map<infer K, infer V> ? Map<Unstubify<K>, Unstubify<V>> : T extends Set<infer V> ? Set<Unstubify<V>> : T extends Array<infer V> ? Array<Unstubify<V>> : T extends ReadonlyArray<infer V> ? ReadonlyArray<Unstubify<V>> : T extends BaseType ? T : T extends {
[key: string | number]: unknown;
} ? {
[K in keyof T]: Unstubify<T[K]>;
} : T;
[K in keyof T]: Unstubify<T[K]>;
} : T;
type UnstubifyAll<A extends any[]> = {
[I in keyof A]: Unstubify<A[I]>;
};
@@ -10166,7 +10166,7 @@ declare namespace Cloudflare {
[K in keyof MainModule]: LoopbackForExport<MainModule[K]>
// If the export is listed in `durableNamespaces`, then it is also a
// DurableObjectNamespace.
& (K extends GlobalProp<"durableNamespaces", never> ? MainModule[K] extends new (...args: any[]) => infer DoInstance ? DoInstance extends Rpc.DurableObjectBranded ? DurableObjectNamespace<DoInstance> : DurableObjectNamespace<undefined> : DurableObjectNamespace<undefined> : {});
& (K extends GlobalProp<"durableNamespaces", never> ? MainModule[K] extends new (...args: any[]) => infer DoInstance ? DoInstance extends Rpc.DurableObjectBranded ? DurableObjectNamespace<DoInstance> : DurableObjectNamespace<undefined> : DurableObjectNamespace<undefined> : {});
};
}
declare namespace CloudflareWorkersModule {
@@ -10251,6 +10251,9 @@ declare namespace CloudflareWorkersModule {
export const env: Cloudflare.Env;
export const exports: Cloudflare.Exports;
}
declare module 'cloudflare:test' {
export = CloudflareWorkersModule;
}
declare module 'cloudflare:workers' {
export = CloudflareWorkersModule;
}
@@ -10822,10 +10825,10 @@ interface WorkflowInstanceCreateOptions<PARAMS = unknown> {
}
type InstanceStatus = {
status: 'queued' // means that instance is waiting to be started (see concurrency limits)
| 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running
| 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish
| 'waitingForPause' // instance is finishing the current work to pause
| 'unknown';
| 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running
| 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish
| 'waitingForPause' // instance is finishing the current work to pause
| 'unknown';
error?: {
name: string;
message: string;

View File

@@ -39,6 +39,14 @@ deleted_classes = ["AnilistDo"]
tag = "v4"
new_sqlite_classes = ["AnilistDo"]
[[migrations]]
tag = "v5"
deleted_classes = ["AnilistDo"]
[[migrations]]
tag = "v6"
new_sqlite_classes = ["AnilistDo"]
[[queues.producers]]
queue = "anilist-updates"
binding = "ANILIST_UPDATES"
@@ -59,7 +67,7 @@ id = "c8db249d8ee7462b91f9c374321776e4"
preview_id = "ff38240eb2aa4b1388c705f4974f5aec"
[triggers]
crons = ["0 */12 * * *"]
crons = ["0 */9 * * *"]
[[d1_databases]]
binding = "DB"