Compare commits

6 commits: 80a6f67ead...eb6dc545e2

| SHA1 |
|---|
| eb6dc545e2 |
| a99961df51 |
| d5b113c884 |
| 6eb42f6a33 |
| 05df043fbe |
| fb7990b274 |
package.json

```diff
@@ -6,6 +6,7 @@
   "type": "module",
   "scripts": {
     "dev": "wrangler dev src/index.ts --port 8080",
     "deploy": "wrangler deploy --minify src/index.ts",
     "env:generate": "tsx src/scripts/generateEnv.ts",
    "env:verify": "tsx src/scripts/verifyEnv.ts",
     "db:generate": "drizzle-kit generate",
```
processDelayedTasks.test.ts

```diff
@@ -1,204 +1,158 @@
+import { env } from "cloudflare:test";
+import { DateTime } from "luxon";
 import { beforeEach, describe, expect, it, vi } from "vitest";
 
+import { getTestEnv } from "../test/getTestEnv";
 import { processDelayedTasks } from "./processDelayedTasks";
 
 describe("processDelayedTasks", () => {
-  let mockEnv: Cloudflare.Env;
-  let mockCtx: ExecutionContext;
-  let kvGetSpy: ReturnType<typeof vi.fn>;
-  let kvDeleteSpy: ReturnType<typeof vi.fn>;
-  let kvPutSpy: ReturnType<typeof vi.fn>;
-  let queueSendSpy: ReturnType<typeof vi.fn>;
-
-  beforeEach(() => {
-    kvGetSpy = vi.fn(() => Promise.resolve(null));
-    kvDeleteSpy = vi.fn(() => Promise.resolve());
-    kvPutSpy = vi.fn(() => Promise.resolve());
-    queueSendSpy = vi.fn(() => Promise.resolve());
-
-    mockEnv = {
-      DELAYED_TASKS: {
-        get: kvGetSpy,
-        delete: kvDeleteSpy,
-        put: kvPutSpy,
-        list: vi.fn(() =>
-          Promise.resolve({
-            keys: [],
-            list_complete: true as const,
-            cacheStatus: null,
-          }),
-        ),
-        getWithMetadata: vi.fn(() =>
-          Promise.resolve({ value: null, metadata: null }),
-        ),
-      } as any,
-      NEW_EPISODE: {
-        send: queueSendSpy,
-      } as any,
-      ANILIST_UPDATES: {
-        send: vi.fn(() => Promise.resolve()),
-      } as any,
-    } as any;
-
-    mockCtx = {
-      waitUntil: vi.fn(() => {}),
-      passThroughOnException: vi.fn(() => {}),
-    } as any;
+  beforeEach(async () => {
+    const tasksToDelete = await env.DELAYED_TASKS.list({
+      prefix: "delayed-task:",
+    });
+    console.log(`Found ${tasksToDelete.keys.length} tasks to delete`);
+    for (const task of tasksToDelete.keys) {
+      await env.DELAYED_TASKS.delete(task.name);
+    }
   });
 
   it("handles empty KV namespace", async () => {
-    await processDelayedTasks(mockEnv, mockCtx);
+    await processDelayedTasks(env);
 
-    expect(kvDeleteSpy).not.toHaveBeenCalled();
-    expect(queueSendSpy).not.toHaveBeenCalled();
+    await expect(
+      env.DELAYED_TASKS.list({ prefix: "delayed-task:" }).then(
+        (result) => result.keys,
+      ),
+    ).resolves.toHaveLength(0);
   });
 
-  it("queues tasks within 12 hours of scheduled time", async () => {
-    const now = Math.floor(Date.now() / 1000);
-    const scheduledTime = now + 6 * 3600; // 6 hours from now
+  it("queues tasks within 9 hours of scheduled time", async () => {
+    const now = DateTime.now();
+    const scheduledTime = now.plus({ hours: 6 }).toSeconds();
     const taskMetadata = {
       queueName: "NEW_EPISODE",
       body: { aniListId: 123, episodeNumber: 1 },
       headers: { "Content-Type": "application/json" },
       scheduledEpochTime: scheduledTime,
       taskId: "task-1",
-      createdAt: now - 18 * 3600,
+      createdAt: now.minus({ hours: 18 }).toSeconds(),
       retryCount: 0,
     };
 
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: `delayed-task:${scheduledTime}:task-1` }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
-    );
-
-    kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
-
-    await processDelayedTasks(mockEnv, mockCtx);
-
-    expect(queueSendSpy).toHaveBeenCalledTimes(1);
-    expect(kvDeleteSpy).toHaveBeenCalledTimes(1);
-    expect(kvDeleteSpy).toHaveBeenCalledWith(
+    await env.DELAYED_TASKS.put(
       `delayed-task:${scheduledTime}:task-1`,
+      JSON.stringify(taskMetadata),
     );
+
+    await processDelayedTasks(env);
+
+    await expect(
+      env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-1`),
+    ).resolves.toBeNull();
   });
 
-  it("does not queue tasks beyond 12 hours", async () => {
-    const now = Math.floor(Date.now() / 1000);
-    const scheduledTime = now + 24 * 3600; // 24 hours from now
+  it("does not queue tasks beyond 9 hours", async () => {
+    const now = DateTime.now();
+    const scheduledTime = now.plus({ hours: 24 }).toSeconds();
     const taskMetadata = {
       queueName: "NEW_EPISODE",
       body: { aniListId: 456, episodeNumber: 2 },
       headers: { "Content-Type": "application/json" },
       scheduledEpochTime: scheduledTime,
       taskId: "task-2",
-      createdAt: now,
+      createdAt: now.toSeconds(),
       retryCount: 0,
     };
 
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: `delayed-task:${scheduledTime}:task-2` }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
-    );
-
-    kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
-
-    await processDelayedTasks(mockEnv, mockCtx);
-
-    expect(queueSendSpy).not.toHaveBeenCalled();
-    expect(kvDeleteSpy).not.toHaveBeenCalled();
+    await env.DELAYED_TASKS.put(
+      `delayed-task:${scheduledTime}:task-2`,
+      JSON.stringify(taskMetadata),
+    );
+
+    await processDelayedTasks(env);
+
+    await expect(
+      env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-2`),
+    ).resolves.toBeTruthy();
   });
 
   it("increments retry count on queue failure", async () => {
-    const now = Math.floor(Date.now() / 1000);
-    const scheduledTime = now + 1 * 3600; // 1 hour from now
+    const now = DateTime.now();
+    const scheduledTime = now.plus({ hours: 1 }).toSeconds();
     const taskMetadata = {
       queueName: "NEW_EPISODE",
       body: { aniListId: 789, episodeNumber: 3 },
       headers: { "Content-Type": "application/json" },
       scheduledEpochTime: scheduledTime,
       taskId: "task-3",
-      createdAt: now - 23 * 3600,
+      createdAt: now.minus({ hours: 23 }).toSeconds(),
       retryCount: 0,
     };
 
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: `delayed-task:${scheduledTime}:task-3` }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
-    );
-
-    kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
-    queueSendSpy.mockRejectedValue(new Error("Queue error"));
-
-    await processDelayedTasks(mockEnv, mockCtx);
-
-    expect(kvPutSpy).toHaveBeenCalledTimes(1);
-    const updatedMetadata = JSON.parse(kvPutSpy.mock.calls[0][1]);
+    const mockEnv = getTestEnv({
+      NEW_EPISODE: {
+        send: vi.fn().mockRejectedValue(new Error("Queue error")),
+        sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
+      },
+    });
+    await mockEnv.DELAYED_TASKS.put(
+      `delayed-task:${scheduledTime}:task-3`,
+      JSON.stringify(taskMetadata),
+    );
+
+    await processDelayedTasks(mockEnv);
+
+    const updatedMetadata = JSON.parse(
+      (await mockEnv.DELAYED_TASKS.get(
+        `delayed-task:${scheduledTime}:task-3`,
+      ))!,
+    );
     expect(updatedMetadata.retryCount).toBe(1);
-    expect(kvDeleteSpy).not.toHaveBeenCalled();
   });
 
   it("logs alert after 3 failed attempts", async () => {
     const consoleErrorSpy = vi.fn(() => {});
     const originalConsoleError = console.error;
     console.error = consoleErrorSpy as any;
 
-    const now = Math.floor(Date.now() / 1000);
-    const scheduledTime = now + 1 * 3600;
+    const now = DateTime.now();
+    const scheduledTime = now.plus({ hours: 1 }).toSeconds();
     const taskMetadata = {
       queueName: "NEW_EPISODE",
-      body: { aniListId: 999, episodeNumber: 4 },
+      body: { aniListId: 789, episodeNumber: 4 },
       headers: { "Content-Type": "application/json" },
       scheduledEpochTime: scheduledTime,
       taskId: "task-4",
-      createdAt: now - 23 * 3600,
-      retryCount: 2, // Will become 3 after this failure
+      createdAt: now.minus({ hours: 23 }).toSeconds(),
+      retryCount: 2,
     };
 
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: `delayed-task:${scheduledTime}:task-4` }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
-    );
-
-    kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
-    queueSendSpy.mockRejectedValue(new Error("Queue error"));
-
-    await processDelayedTasks(mockEnv, mockCtx);
+    const mockEnv = getTestEnv({
+      NEW_EPISODE: {
+        send: vi.fn().mockRejectedValue(new Error("Queue error")),
+        sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
+      },
+    });
+    await mockEnv.DELAYED_TASKS.put(
+      `delayed-task:${scheduledTime}:task-4`,
+      JSON.stringify(taskMetadata),
+    );
+
+    await processDelayedTasks(mockEnv);
 
     // Check that alert was logged
     const alertCalls = consoleErrorSpy.mock.calls.filter((call: any) =>
       call[0]?.includes("🚨 ALERT"),
     );
     expect(alertCalls.length).toBeGreaterThan(0);
 
     console.error = originalConsoleError;
   });
 
   it("handles multiple tasks in single cron run", async () => {
-    const now = Math.floor(Date.now() / 1000);
+    const now = DateTime.now();
 
     const task1Metadata = {
       queueName: "NEW_EPISODE",
       body: { aniListId: 100, episodeNumber: 1 },
       headers: { "Content-Type": "application/json" },
-      scheduledEpochTime: now + 2 * 3600,
+      scheduledEpochTime: now.plus({ hours: 2 }).toSeconds(),
       taskId: "task-1",
-      createdAt: now - 20 * 3600,
+      createdAt: now.minus({ hours: 20 }).toSeconds(),
       retryCount: 0,
     };
 
@@ -206,47 +160,53 @@ describe("processDelayedTasks", () => {
       queueName: "NEW_EPISODE",
       body: { aniListId: 200, episodeNumber: 2 },
       headers: { "Content-Type": "application/json" },
-      scheduledEpochTime: now + 5 * 3600,
+      scheduledEpochTime: now.plus({ hours: 5 }).toSeconds(),
       taskId: "task-2",
-      createdAt: now - 19 * 3600,
+      createdAt: now.minus({ hours: 19 }).toSeconds(),
       retryCount: 0,
     };
 
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [
-          { name: `delayed-task:${task1Metadata.scheduledEpochTime}:task-1` },
-          { name: `delayed-task:${task2Metadata.scheduledEpochTime}:task-2` },
-        ],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
-    );
-
-    kvGetSpy
-      .mockReturnValueOnce(Promise.resolve(JSON.stringify(task1Metadata)))
-      .mockReturnValueOnce(Promise.resolve(JSON.stringify(task2Metadata)));
-
-    await processDelayedTasks(mockEnv, mockCtx);
-
-    expect(queueSendSpy).toHaveBeenCalledTimes(2);
-    expect(kvDeleteSpy).toHaveBeenCalledTimes(2);
+    await env.DELAYED_TASKS.put(
+      `delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
+      JSON.stringify(task1Metadata),
+    );
+    await env.DELAYED_TASKS.put(
+      `delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
+      JSON.stringify(task2Metadata),
+    );
+
+    await processDelayedTasks(env);
+
+    await expect(
+      env.DELAYED_TASKS.get(
+        `delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
+      ),
+    ).resolves.toBeNull();
+    await expect(
+      env.DELAYED_TASKS.get(
+        `delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
+      ),
+    ).resolves.toBeNull();
   });
 
   it("skips tasks with null values in KV", async () => {
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: "delayed-task:123:invalid" }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
-    );
-
-    kvGetSpy.mockReturnValue(Promise.resolve(null));
-
-    await processDelayedTasks(mockEnv, mockCtx);
+    const queueSendSpy = vi.fn().mockResolvedValue(undefined);
+    const mockEnv = getTestEnv({
+      NEW_EPISODE: {
+        send: queueSendSpy,
+        sendBatch: queueSendSpy,
+      },
+      ANILIST_UPDATES: {
+        send: queueSendSpy,
+        sendBatch: queueSendSpy,
+      },
+    });
+    await mockEnv.DELAYED_TASKS.put(`delayed-task:123:invalid`, null);
+
+    await processDelayedTasks(mockEnv);
 
     expect(queueSendSpy).not.toHaveBeenCalled();
-    expect(kvDeleteSpy).not.toHaveBeenCalled();
+    await expect(
+      mockEnv.DELAYED_TASKS.get(`delayed-task:123:invalid`),
+    ).resolves.toBeNull();
   });
 });
```
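The rewrite above swaps hand-rolled spy objects for the real Miniflare-backed bindings that `cloudflare:test` exposes, so the tests now assert on observable KV state instead of mock call counts. For that import to resolve, the project presumably runs Vitest through `@cloudflare/vitest-pool-workers`; the repo's actual Vitest config is not part of this diff, so treat the following as a sketch of the likely shape:

```ts
// vitest.config.ts — hypothetical sketch; the real config file is not shown in this diff.
// The workers pool runs each test inside a Workers runtime, which is what makes
// `import { env } from "cloudflare:test"` resolve to live KV/Queue bindings
// declared in wrangler.toml.
import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";

export default defineWorkersConfig({
  test: {
    poolOptions: {
      workers: {
        wrangler: { configPath: "./wrangler.toml" },
      },
    },
  },
});
```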
processDelayedTasks.ts

```diff
@@ -2,15 +2,11 @@ import { DateTime } from "luxon";
 
 import type { DelayedTaskMetadata } from "./delayedTask";
 import { deserializeDelayedTask } from "./delayedTask";
-import { queueTask } from "./queueTask";
+import { MAX_DELAY_SECONDS, queueTask } from "./queueTask";
 
-const MAX_DELAY_SECONDS = 12 * 60 * 60; // 43,200 seconds (12 hours)
 const RETRY_ALERT_THRESHOLD = 3;
 
-export async function processDelayedTasks(
-  env: Cloudflare.Env,
-  ctx: ExecutionContext,
-): Promise<void> {
+export async function processDelayedTasks(env: Cloudflare.Env): Promise<void> {
   console.log("Starting delayed task processing cron job");
 
   const kvNamespace = env.DELAYED_TASKS;
@@ -31,7 +27,7 @@ export async function processDelayedTasks(
   console.log(`Found ${keys.length} delayed tasks to check`);
 
   const currentTime = Math.floor(Date.now() / 1000);
-  const twelveHoursFromNow = currentTime + MAX_DELAY_SECONDS;
+  const maxQueueTime = currentTime + MAX_DELAY_SECONDS;
 
   let processedCount = 0;
   let queuedCount = 0;
@@ -40,16 +36,17 @@ export async function processDelayedTasks(
   for (const key of keys) {
     try {
       const value = await kvNamespace.get(key.name);
-      if (!value) {
-        console.warn(`Task key ${key.name} has no value, skipping`);
+      if (!value || value == "null") {
+        console.warn(`Task key ${key.name} has no value, removing`);
+        await kvNamespace.delete(key.name);
         continue;
       }
 
       const metadata: DelayedTaskMetadata = deserializeDelayedTask(value);
       processedCount++;
 
-      // Check if task is ready to be queued (within 12 hours of scheduled time)
-      if (metadata.scheduledEpochTime <= twelveHoursFromNow) {
+      // Check if task is ready to be queued (within 9 hours of scheduled time)
+      if (metadata.scheduledEpochTime <= maxQueueTime) {
        const remainingDelay = Math.max(
          0,
          metadata.scheduledEpochTime - currentTime,
@@ -100,7 +97,7 @@ export async function processDelayedTasks(
       }
     } else {
       const hoursUntilReady =
-        (metadata.scheduledEpochTime - twelveHoursFromNow) / 3600;
+        (metadata.scheduledEpochTime - maxQueueTime) / 3600;
       console.log(
         `Task ${metadata.taskId} not ready yet (${hoursUntilReady.toFixed(1)} hours until queueable)`,
       );
```
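For orientation: the KV records this cron walks are written under keys of the form `delayed-task:${scheduledEpochTime}:${taskId}` with a JSON-serialized metadata payload. `delayedTask.ts` itself is not part of this diff, so the shape below is inferred from the test fixtures above rather than copied from the source:

```ts
// Inferred from the test fixtures; the real delayedTask.ts is not shown in this diff.
interface DelayedTaskMetadata {
  queueName: "NEW_EPISODE" | "ANILIST_UPDATES";
  body: unknown; // queue-specific payload, e.g. { aniListId, episodeNumber }
  headers: Record<string, string>;
  scheduledEpochTime: number; // unix seconds when the task should run
  taskId: string;
  createdAt: number; // unix seconds when the task was parked in KV
  retryCount: number; // bumped on each failed queue attempt
}

// One plausible reading of deserializeDelayedTask: JSON.parse plus a cast.
function deserializeDelayedTask(value: string): DelayedTaskMetadata {
  return JSON.parse(value) as DelayedTaskMetadata;
}
```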
queueTask.ts

```diff
@@ -30,6 +30,10 @@ interface QueueTaskOptionalArgs {
   env?: Cloudflare.Env;
 }
 
+export const MAX_DELAY_SECONDS = Duration.fromObject({ hours: 9 }).as(
+  "seconds",
+);
+
 export async function queueTask(
   queueName: QueueName,
   body: QueueBody[QueueName],
@@ -42,8 +46,6 @@ export async function queueTask(
     req?.header(),
   );
 
-  const MAX_DELAY_SECONDS = Duration.fromObject({ hours: 9 }).as("seconds");
-
   // If delay exceeds 9 hours, store in KV for later processing
   if (scheduleTime > MAX_DELAY_SECONDS) {
     if (!env || !env.DELAYED_TASKS) {
```
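Hoisting `MAX_DELAY_SECONDS` to an exported module constant lets the cron and `queueTask` share a single definition of the queueable window. The surrounding control flow is only partially visible here, so this is a minimal sketch of the branch as it reads from the hunks — `dispatch` is a hypothetical name, the KV key scheme comes from the diff, and the `send` options shape is Cloudflare's Queues API:

```ts
// Sketch of the dispatch branch around the visible hunk; not the verbatim source.
import { Duration } from "luxon";

export const MAX_DELAY_SECONDS = Duration.fromObject({ hours: 9 }).as("seconds");

async function dispatch(
  env: Cloudflare.Env,
  scheduleTime: number, // seconds until the task should run
  metadata: { taskId: string; scheduledEpochTime: number },
  body: unknown,
) {
  if (scheduleTime > MAX_DELAY_SECONDS) {
    // Too far out for a native queue delay: park the task in KV
    // and let the processDelayedTasks cron pick it up later.
    await env.DELAYED_TASKS.put(
      `delayed-task:${metadata.scheduledEpochTime}:${metadata.taskId}`,
      JSON.stringify({ ...metadata, body, retryCount: 0 }),
    );
  } else {
    // Within the window: let the queue itself hold the message back.
    await env.NEW_EPISODE.send(body, { delaySeconds: scheduleTime });
  }
}
```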
getTestEnv.ts

```diff
@@ -8,10 +8,12 @@ export function getTestEnvVariables(): Cloudflare.Env {
 export function getTestEnv({
   ADMIN_SDK_JSON = '{"client_email": "test@test.com", "project_id": "test-26g38"}',
   LOG_DB_QUERIES = "false",
+  ...mockEnv
 }: Partial<Cloudflare.Env> = {}): Cloudflare.Env {
   return {
     ...env,
     ADMIN_SDK_JSON,
     LOG_DB_QUERIES,
+    ...mockEnv,
   };
 }
```
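The new rest parameter means any binding can be overridden per test while everything else falls through to the real test env — which is how the retry tests above swap in a failing queue. Usage, taken from the test diff:

```ts
// Override a single binding; all other bindings come from the real test env.
const mockEnv = getTestEnv({
  NEW_EPISODE: {
    send: vi.fn().mockRejectedValue(new Error("Queue error")),
    sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
  },
});
```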
worker-configuration.d.ts (vendored) — 47 changed lines
```diff
@@ -22,12 +22,12 @@ declare namespace Cloudflare {
     NEW_EPISODE: Queue;
   }
 }
-interface Env extends Cloudflare.Env {}
+interface Env extends Cloudflare.Env { }
 type StringifyValues<EnvType extends Record<string, unknown>> = {
   [Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string;
 };
 declare namespace NodeJS {
-  interface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, "ADMIN_SDK_JSON" | "CLOUDFLARE_TOKEN" | "CLOUDFLARE_D1_TOKEN" | "CLOUDFLARE_ACCOUNT_ID" | "CLOUDFLARE_DATABASE_ID" | "PROXY_URL" | "USE_MOCK_DATA" | "LOG_DB_QUERIES">> {}
+  interface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, "ADMIN_SDK_JSON" | "CLOUDFLARE_TOKEN" | "CLOUDFLARE_D1_TOKEN" | "CLOUDFLARE_ACCOUNT_ID" | "CLOUDFLARE_DATABASE_ID" | "PROXY_URL" | "USE_MOCK_DATA" | "LOG_DB_QUERIES">> { }
 }
 
 // Begin runtime types
@@ -1644,7 +1644,7 @@ declare abstract class Body {
  */
 declare var Response: {
   prototype: Response;
-  new (body?: BodyInit | null, init?: ResponseInit): Response;
+  new(body?: BodyInit | null, init?: ResponseInit): Response;
   error(): Response;
   redirect(url: string, status?: number): Response;
   json(any: any, maybeInit?: (ResponseInit | Response)): Response;
@@ -2192,7 +2192,7 @@ interface ReadableStream<R = any> {
  */
 declare const ReadableStream: {
   prototype: ReadableStream;
-  new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy<Uint8Array>): ReadableStream<Uint8Array>;
+  new(underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy<Uint8Array>): ReadableStream<Uint8Array>;
   new <R = any>(underlyingSource?: UnderlyingSource<R>, strategy?: QueuingStrategy<R>): ReadableStream<R>;
 };
 /**
@@ -3034,7 +3034,7 @@ type WebSocketEventMap = {
  */
 declare var WebSocket: {
   prototype: WebSocket;
-  new (url: string, protocols?: (string[] | string)): WebSocket;
+  new(url: string, protocols?: (string[] | string)): WebSocket;
   readonly READY_STATE_CONNECTING: number;
   readonly CONNECTING: number;
   readonly READY_STATE_OPEN: number;
@@ -3091,7 +3091,7 @@ interface WebSocket extends EventTarget<WebSocketEventMap> {
   extensions: string | null;
 }
 declare const WebSocketPair: {
-  new (): {
+  new(): {
     0: WebSocket;
     1: WebSocket;
   };
@@ -9414,19 +9414,19 @@ interface IncomingRequestCfPropertiesTLSClientAuthPlaceholder {
 }
 /** Possible outcomes of TLS verification */
 declare type CertVerificationStatus =
-  /** Authentication succeeded */
-  "SUCCESS"
-  /** No certificate was presented */
+    /** Authentication succeeded */
+    "SUCCESS"
+    /** No certificate was presented */
   | "NONE"
-  /** Failed because the certificate was self-signed */
+    /** Failed because the certificate was self-signed */
   | "FAILED:self signed certificate"
-  /** Failed because the certificate failed a trust chain check */
+    /** Failed because the certificate failed a trust chain check */
   | "FAILED:unable to verify the first certificate"
-  /** Failed because the certificate not yet valid */
+    /** Failed because the certificate not yet valid */
   | "FAILED:certificate is not yet valid"
-  /** Failed because the certificate is expired */
+    /** Failed because the certificate is expired */
   | "FAILED:certificate has expired"
-  /** Failed for another unspecified reason */
+    /** Failed for another unspecified reason */
   | "FAILED";
 /**
  * An upstream endpoint's response to a TCP `keepalive` message from Cloudflare.
@@ -9478,13 +9478,13 @@ interface D1ExecResult {
   duration: number;
 }
 type D1SessionConstraint =
-  // Indicates that the first query should go to the primary, and the rest queries
-  // using the same D1DatabaseSession will go to any replica that is consistent with
-  // the bookmark maintained by the session (returned by the first query).
-  'first-primary'
-  // Indicates that the first query can go anywhere (primary or replica), and the rest queries
-  // using the same D1DatabaseSession will go to any replica that is consistent with
-  // the bookmark maintained by the session (returned by the first query).
+    // Indicates that the first query should go to the primary, and the rest queries
+    // using the same D1DatabaseSession will go to any replica that is consistent with
+    // the bookmark maintained by the session (returned by the first query).
+    'first-primary'
+    // Indicates that the first query can go anywhere (primary or replica), and the rest queries
+    // using the same D1DatabaseSession will go to any replica that is consistent with
+    // the bookmark maintained by the session (returned by the first query).
   | 'first-unconstrained';
 type D1SessionBookmark = string;
 declare abstract class D1Database {
@@ -9599,7 +9599,7 @@ declare type EmailExportedHandler<Env = unknown> = (message: ForwardableEmailMes
 declare module "cloudflare:email" {
   let _EmailMessage: {
     prototype: EmailMessage;
-    new (from: string, to: string, raw: ReadableStream | string): EmailMessage;
+    new(from: string, to: string, raw: ReadableStream | string): EmailMessage;
   };
   export { _EmailMessage as EmailMessage };
 }
@@ -10251,6 +10251,9 @@ declare namespace CloudflareWorkersModule {
   export const env: Cloudflare.Env;
   export const exports: Cloudflare.Exports;
 }
+declare module 'cloudflare:test' {
+  export = CloudflareWorkersModule;
+}
 declare module 'cloudflare:workers' {
   export = CloudflareWorkersModule;
 }
```
wrangler.toml

```diff
@@ -39,6 +39,14 @@ deleted_classes = ["AnilistDo"]
 tag = "v4"
 new_sqlite_classes = ["AnilistDo"]
 
+[[migrations]]
+tag = "v5"
+deleted_classes = ["AnilistDo"]
+
+[[migrations]]
+tag = "v6"
+new_sqlite_classes = ["AnilistDo"]
+
 [[queues.producers]]
 queue = "anilist-updates"
 binding = "ANILIST_UPDATES"
@@ -59,7 +67,7 @@ id = "c8db249d8ee7462b91f9c374321776e4"
 preview_id = "ff38240eb2aa4b1388c705f4974f5aec"
 
 [triggers]
-crons = ["0 */12 * * *"]
+crons = ["0 */9 * * *"]
 
 [[d1_databases]]
 binding = "DB"
```
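One scheduling detail worth noting: `0 */9 * * *` steps over the hour field, so the cron fires at 00:00, 09:00, and 18:00 UTC — gaps of 9, 9, and 6 hours. Since every run queues anything scheduled within the next `MAX_DELAY_SECONDS` (9 hours), consecutive runs at most 9 hours apart should still cover the whole timeline, assuming the queue's native delay limit accommodates the 9-hour window as configured above.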