Compare commits

Comparing 7793588e0b...main (32 commits)

| SHA1 |
|---|
| 0b237d542b |
| c01e005afb |
| e5d9d62be2 |
| 8d63d4fa5e |
| 07bece1f6c |
| 2ed38e92bc |
| 26ca15d4aa |
| 4c96f58cb0 |
| b64bd4fc26 |
| 4c2d0a9177 |
| dc60a1e045 |
| 6570c25617 |
| 6f795bdde0 |
| 243c279ca9 |
| 286824e3a1 |
| b26d22ad91 |
| 3c5685dbdb |
| c527a6eac5 |
| f16ac80b7e |
| cd04a75b06 |
| eb6dc545e2 |
| a99961df51 |
| d5b113c884 |
| 6eb42f6a33 |
| 05df043fbe |
| fb7990b274 |
| 80a6f67ead |
| 1501aff3b6 |
| 9b17f5bcfe |
| 45bf96e764 |
| e8c7c7801f |
| 1140ffa8b8 |

@@ -1,16 +0,0 @@
-node_modules
-Dockerfile*
-docker-compose*
-.dockerignore
-.git
-.gitignore
-README.md
-LICENSE
-.vscode
-Makefile
-helm-charts
-.env
-.dev.vars
-.editorconfig
-.idea
-coverage*

@@ -1 +1 @@
-bunx lint-staged
+nlx lint-staged

Dockerfile (41 lines)

@@ -1,41 +0,0 @@
-# use the official Bun image
-# see all versions at https://hub.docker.com/r/oven/bun/tags
-FROM oven/bun:1 as base
-WORKDIR /usr/app
-
-# install dependencies into temp directory
-# this will cache them and speed up future builds
-FROM base AS install
-RUN mkdir -p /tmp/dev
-COPY package.json bun.lockb /tmp/dev/
-RUN cd /tmp/dev && bun install --frozen-lockfile
-
-# install with --production (exclude devDependencies)
-RUN mkdir -p /tmp/prod
-COPY package.json bun.lockb /tmp/prod/
-RUN cd /tmp/prod && bun install --frozen-lockfile --production
-
-# copy node_modules from temp directory
-# then copy all (non-ignored) project files into the image
-FROM base AS prerelease
-COPY --from=install /tmp/dev/node_modules node_modules
-COPY . .
-
-# [optional] tests & build
-ENV NODE_ENV=production
-RUN bun test
-RUN bun build --compile src/index.ts --outfile=aniplay
-
-# copy production dependencies and source code into final image
-FROM base AS release
-COPY --from=install /tmp/prod/node_modules node_modules
-COPY --from=prerelease /usr/app/src ./src
-COPY --from=prerelease /usr/app/package.json .
-COPY --from=prerelease /usr/app/tsconfig.json .
-# TODO: uncomment once v2 is ready
-# COPY --from=prerelease /usr/app/drizzle.config.ts .
-
-# run the app
-USER bun
-EXPOSE 3000
-ENTRYPOINT [ "bun", "run", "prod:server" ]

README.md (76 lines)

@@ -1,12 +1,72 @@
-```
-npm install
-npm run dev
-```
+# Aniplay API
 
-```
-npm run deploy
-```
+API for [Aniplay](https://github.com/silverAndroid/aniplay), built with Cloudflare Workers, Hono, and Drizzle ORM.
+
+## Tech Stack
 
+- **Cloudflare Workers**: Serverless execution environment.
+- **Hono**: Ultrafast web framework (OpenAPI).
+- **GraphQL**: Used internally for communicating with the [AniList](https://anilist.co) API.
+- **Drizzle ORM**: TypeScript ORM for D1 (Cloudflare's serverless SQL database).
+- **Vitest**: Testing framework.
+
+## Prerequisites
+
+- **Node.js**
+- **pnpm**: Package manager.
+
+## Getting Started
+
+1. **Installation**
+
+   ```bash
+   pnpm install
+   ```
+
+2. **Environment Setup**
+   Generate the environment types:
+
+   ```bash
+   pnpm exec wrangler types
+   ```
+
+3. **Database Setup**
+   Apply migrations to the local D1 database:
+   ```bash
+   pnpm exec wrangler d1 migrations apply aniplay
+   ```
+
 ## Development
 
-If a route is internal-only or doesn't need to appear on the OpenAPI spec (that's autogenerated by Hono), use the `Hono` class. Otherwise, use the `OpenAPIHono` class from `@hono/zod-openapi`.
+### Running Locally
+
+Start the development server:
+
+```bash
+pnpm run dev
+```
+
+### Testing
+
+Run the tests using Vitest:
+
+```bash
+pnpm test
+```
+
+## Deployment
+
+Deploy to Cloudflare Workers:
+
+```bash
+pnpm run deploy
+```
+
+## Project Structure
+
+- `src/controllers`: API route handlers (titles, episodes, search, etc.)
+- `src/libs`: Shared utilities and logic (AniList integration, background tasks)
+- `src/middleware`: Middleware handlers (authentication, authorization, etc.)
+- `src/models`: Database schema and models
+- `src/scripts`: Utility scripts for maintenance and setup
+- `src/types`: TypeScript type definitions

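The Development note in the previous README (dropped in this diff) distinguishes plain `Hono` routers, for internal-only routes, from `OpenAPIHono` routers, for routes that should appear in the autogenerated OpenAPI spec. A minimal sketch of that split, reusing the `@hono/zod-openapi` imports the controllers in this diff already use; the route name and path below are hypothetical:

```ts
import { Hono } from "hono";
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";

// Internal-only route: registered on a plain Hono router,
// so it never appears in the generated OpenAPI document.
const internal = new Hono();
internal.get("/health", (c) => c.json({ ok: true }));

// Public route: described with createRoute and registered via
// app.openapi, so it is included in the generated spec.
const api = new OpenAPIHono();
const pingRoute = createRoute({
  method: "get",
  path: "/ping", // hypothetical path, for illustration only
  responses: {
    200: {
      description: "Ping response",
      content: {
        "application/json": { schema: z.object({ ok: z.boolean() }) },
      },
    },
  },
});
api.openapi(pingRoute, (c) => c.json({ ok: true }, 200));
```

Only handlers registered through `createRoute` and `app.openapi` end up in the generated spec; routes added with plain `app.get`/`app.post` are still served but stay out of it.
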
@@ -1,7 +0,0 @@
-[test]
-preload = [
-  "./testSetup.ts",
-  "./src/mocks/consumet.ts",
-  "./src/mocks/getGoogleAuthToken.ts",
-  "./src/mocks/cloudflare.ts",
-]

@@ -6,8 +6,7 @@
   "type": "module",
   "scripts": {
     "dev": "wrangler dev src/index.ts --port 8080",
-    "env:generate": "tsx src/scripts/generateEnv.ts",
-    "env:verify": "tsx src/scripts/verifyEnv.ts",
+    "deploy": "wrangler deploy --minify src/index.ts",
     "db:generate": "drizzle-kit generate",
     "db:migrate": "drizzle-kit migrate",
     "test": "vitest",
@@ -17,7 +16,6 @@
     "tsx": "tsx"
   },
   "dependencies": {
-    "@consumet/extensions": "github:consumet/consumet.ts#3dd0ccb",
     "@hono/swagger-ui": "^0.5.1",
     "@hono/zod-openapi": "^0.19.5",
     "@hono/zod-validator": "^0.2.2",
@@ -27,15 +25,14 @@
     "graphql-request": "^7.4.0",
     "hono": "^4.7.7",
     "jose": "^5.10.0",
-    "lodash.isequal": "^4.5.0",
     "lodash.mapkeys": "^4.6.0",
     "luxon": "^3.6.1",
     "zod": "^3.24.3"
   },
   "devDependencies": {
     "@cloudflare/vitest-pool-workers": "^0.10.15",
+    "@graphql-typed-document-node/core": "^3.2.0",
     "@trivago/prettier-plugin-sort-imports": "^4.3.0",
-    "@types/lodash.isequal": "^4.5.8",
     "@types/lodash.mapkeys": "^4.6.9",
     "@types/luxon": "^3.6.2",
     "@types/node": "^25.0.1",
@@ -54,7 +51,6 @@
     "ts-morph": "^22.0.0",
     "tsx": "^4.20.6",
     "typescript": "^5.8.3",
-    "util": "^0.12.5",
     "vitest": "~3.2.4",
     "wrangler": "^4.51.0",
     "zx": "8.1.5"

pnpm-lock.yaml (generated, 554 lines)

Generated lockfile, regenerated to match the package.json changes above: removes @consumet/extensions (resolved from https://codeload.github.com/consumet/consumet.ts/tar.gz/3dd0ccb) together with its transitive dependency tree (axios, cheerio, crypto-js, parse5, htmlparser2, and related packages), removes lodash.isequal, @types/lodash.isequal, and util, and adds @graphql-typed-document-node/core 3.2.0.

@@ -1,5 +0,0 @@
-import { ANIME, META } from "@consumet/extensions";
-import fetchAdapter from "@haverstack/axios-fetch-adapter";
-
-const gogoAnime = new ANIME.Gogoanime(undefined, undefined, fetchAdapter);
-export const aniList = new META.Anilist(gogoAnime, undefined, fetchAdapter);

@@ -52,7 +52,7 @@ app.openapi(route, async (c) => {
   // Check if we should use mock data
   const { useMockData } = await import("~/libs/useMockData");
   if (useMockData()) {
-    const { mockEpisodes } = await import("~/mocks/mockData");
+    const { mockEpisodes } = await import("~/mocks");
 
     return c.json({
       success: true,

@@ -1,64 +0,0 @@
-import { sortByProperty } from "~/libs/sortByProperty";
-import type { FetchUrlResponse } from "~/types/episode/fetch-url-response";
-
-import { type SkipTime, convertSkipTime } from "./convertSkipTime";
-import {
-  audioPriority,
-  qualityPriority,
-  subtitlesPriority,
-} from "./priorities";
-
-export async function getSourcesFromAnify(
-  provider: string,
-  watchId: string,
-  aniListId: number,
-): Promise<FetchUrlResponse | null> {
-  const response = await fetch("https://anify.eltik.cc/sources", {
-    body: JSON.stringify({
-      watchId,
-      providerId: provider,
-      episodeNumber: "1",
-      id: aniListId.toString(),
-      subType: "sub",
-    }),
-    method: "POST",
-  }).then((res) => res.json() as Promise<AnifySourcesResponse>);
-  const { sources, subtitles, audio, intro, outro, headers } = response;
-
-  if (!sources || sources.length === 0) {
-    return null;
-  }
-
-  const source = sources.sort(sortByProperty(qualityPriority, "quality"))[0]
-    ?.url;
-  subtitles?.sort(sortByProperty(subtitlesPriority, "lang"));
-  audio?.sort(sortByProperty(audioPriority, "lang"));
-
-  return {
-    source,
-    audio,
-    subtitles,
-    intro: convertSkipTime(intro),
-    outro: convertSkipTime(outro),
-    headers: Object.keys(headers ?? {}).length > 0 ? headers : undefined,
-  };
-}
-
-interface AnifySourcesResponse {
-  sources: VideoSource[];
-  subtitles: LanguageSource[];
-  audio: LanguageSource[];
-  intro: SkipTime;
-  outro: SkipTime;
-  headers?: Record<string, string>;
-}
-
-interface VideoSource {
-  url: string;
-  quality: string;
-}
-
-interface LanguageSource {
-  url: string;
-  lang: string;
-}

|||||||
// Check if we should use mock data
|
// Check if we should use mock data
|
||||||
const { useMockData } = await import("~/libs/useMockData");
|
const { useMockData } = await import("~/libs/useMockData");
|
||||||
if (useMockData()) {
|
if (useMockData()) {
|
||||||
const { mockEpisodeUrl } = await import("~/mocks/mockData");
|
const { mockEpisodeUrl } = await import("~/mocks");
|
||||||
|
|
||||||
return c.json({ success: true, result: mockEpisodeUrl });
|
return c.json({ success: true, result: mockEpisodeUrl });
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -84,7 +84,7 @@ app.openapi(route, async (c) => {
     isComplete,
   );
   if (isComplete) {
-    await updateWatchStatus(c.req, deviceId, aniListId, "COMPLETED");
+    await updateWatchStatus(deviceId, aniListId, "COMPLETED");
   }
 
   if (!user) {

@@ -15,7 +15,7 @@ type AiringSchedule = {
   id: number;
 };
 
-export async function getUpcomingTitlesFromAnilist(req: HonoRequest) {
+export async function getUpcomingTitlesFromAnilist() {
   const durableObjectId = env.ANILIST_DO.idFromName("GLOBAL");
   const stub = env.ANILIST_DO.get(durableObjectId);
 

@@ -9,8 +9,8 @@ import { getUpcomingTitlesFromAnilist } from "./anilist";
 
 const app = new Hono();
 
-app.post("/", async (c) => {
-  const titles = await getUpcomingTitlesFromAnilist(c.req);
+export async function checkUpcomingTitles() {
+  const titles = await getUpcomingTitlesFromAnilist();
 
   await Promise.allSettled(
     titles.map(async (title) => {
@@ -44,6 +44,10 @@ app.post("/", async (c) => {
       });
     }),
   );
+}
 
+app.post("/", async (c) => {
+  await checkUpcomingTitles();
+
   return c.json(SuccessResponse, 200);
 });

@@ -30,7 +30,7 @@ export async function fetchPopularTitlesFromAnilist(
       );
       break;
     case "upcoming":
-      data = await stub.nextSeasonPopular(next.season, next.year, limit);
+      data = await stub.nextSeasonPopular(next.season, next.year, page, limit);
       break;
     default:
       throw new Error(`Unknown category: ${category}`);

@@ -41,7 +41,7 @@ app.openapi(route, async (c) => {
   // Check if we should use mock data
   const { useMockData } = await import("~/libs/useMockData");
   if (useMockData()) {
-    const { mockSearchResults } = await import("~/mocks/mockData");
+    const { mockSearchResults } = await import("~/mocks");
 
     // Paginate mock results
     const startIndex = (page - 1) * limit;

@@ -51,7 +51,7 @@ describe('requests the "/title" route', () => {
       headers: new Headers({ "x-anilist-token": "asd" }),
     });
 
-    expect(await response.json()).toMatchSnapshot();
+    await expect(response.json()).resolves.toMatchSnapshot();
     expect(response.status).toBe(200);
   });
 
@@ -63,7 +63,7 @@ describe('requests the "/title" route', () => {
 
     const response = await app.request("/title?id=10");
 
-    expect(await response.json()).toMatchSnapshot();
+    await expect(response.json()).resolves.toMatchSnapshot();
     expect(response.status).toBe(200);
   });
 
@@ -75,7 +75,7 @@ describe('requests the "/title" route', () => {
 
     const response = await app.request("/title?id=-1");
 
-    expect(await response.json()).toEqual({ success: false });
+    await expect(response.json()).resolves.toEqual({ success: false });
     expect(response.status).toBe(404);
   });
 });

@@ -2,6 +2,7 @@ import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
 
 import { fetchTitleFromAnilist } from "~/libs/anilist/getTitle";
 import { fetchFromMultipleSources } from "~/libs/fetchFromMultipleSources";
+import { userProfileMiddleware } from "~/middleware/userProfile";
 import {
   AniListIdQuerySchema,
   ErrorResponse,
@@ -9,6 +10,7 @@
   SuccessResponseSchema,
 } from "~/types/schema";
 import { Title } from "~/types/title";
+import type { User } from "~/types/user";
 
 const app = new OpenAPIHono();
 
@@ -40,6 +42,7 @@
       description: "Title could not be found",
     },
   },
+  middleware: [userProfileMiddleware],
 });
 
 app.openapi(route, async (c) => {
@@ -49,13 +52,18 @@ app.openapi(route, async (c) => {
   // Check if we should use mock data
   const { useMockData } = await import("~/libs/useMockData");
   if (useMockData()) {
-    const { mockTitleDetails } = await import("~/mocks/mockData");
+    const { mockTitleDetails } = await import("~/mocks");
 
     return c.json({ success: true, result: mockTitleDetails() }, 200);
   }
 
   const { result: title, errorOccurred } = await fetchFromMultipleSources([
-    () => fetchTitleFromAnilist(aniListId, aniListToken ?? undefined),
+    () =>
+      fetchTitleFromAnilist(
+        aniListId,
+        (c.get("user") as User)?.id,
+        aniListToken ?? undefined,
+      ),
   ]);
 
   if (errorOccurred) {

@@ -30,19 +30,11 @@ const DeleteMediaListEntryMutation = graphql(`
   }
 `);
 
-/** Updates the watch status for a title on Anilist. If the token is null, the watch status will not be updated.
- *
- * @returns true if the watch status was updated or if the token was null, false if it was not
- */
-export async function maybeUpdateWatchStatusOnAnilist(
+export async function updateWatchStatusOnAnilist(
   titleId: number,
   watchStatus: WatchStatus | null,
-  aniListToken: string | undefined,
+  aniListToken: string,
 ) {
-  if (!aniListToken) {
-    return true;
-  }
-
   const client = new GraphQLClient("https://graphql.anilist.co/");
   const headers = new Headers({ Authorization: `Bearer ${aniListToken}` });
 

@@ -22,8 +22,6 @@ vi.mock("~/mocks", () => ({
|
|||||||
describe("requests the /watch-status route", () => {
|
describe("requests the /watch-status route", () => {
|
||||||
const db = getTestDb(env);
|
const db = getTestDb(env);
|
||||||
let app: typeof import("../../../src/index").app;
|
let app: typeof import("../../../src/index").app;
|
||||||
let maybeUpdateWatchStatusOnAnilist: any;
|
|
||||||
let queueTask: any;
|
|
||||||
let maybeScheduleNextAiringEpisode: any;
|
let maybeScheduleNextAiringEpisode: any;
|
||||||
let removeTask: any;
|
let removeTask: any;
|
||||||
|
|
||||||
@@ -31,10 +29,6 @@ describe("requests the /watch-status route", () => {
|
|||||||
await resetTestDb(db);
|
await resetTestDb(db);
|
||||||
vi.resetModules();
|
vi.resetModules();
|
||||||
|
|
||||||
vi.doMock("./anilist", () => ({
|
|
||||||
maybeUpdateWatchStatusOnAnilist: vi.fn().mockResolvedValue(undefined),
|
|
||||||
}));
|
|
||||||
|
|
||||||
vi.doMock("~/libs/tasks/queueTask", () => ({
|
vi.doMock("~/libs/tasks/queueTask", () => ({
|
||||||
queueTask: vi.fn().mockResolvedValue(undefined),
|
queueTask: vi.fn().mockResolvedValue(undefined),
|
||||||
}));
|
}));
|
||||||
@@ -52,10 +46,6 @@ describe("requests the /watch-status route", () => {
|
|||||||
}));
|
}));
|
||||||
|
|
||||||
app = (await import("~/index")).app;
|
app = (await import("~/index")).app;
|
||||||
maybeUpdateWatchStatusOnAnilist = (
|
|
||||||
await import("~/controllers/watch-status/anilist")
|
|
||||||
).maybeUpdateWatchStatusOnAnilist;
|
|
||||||
queueTask = (await import("~/libs/tasks/queueTask")).queueTask;
|
|
||||||
removeTask = (await import("~/libs/tasks/removeTask")).removeTask;
|
removeTask = (await import("~/libs/tasks/removeTask")).removeTask;
|
||||||
maybeScheduleNextAiringEpisode = (
|
maybeScheduleNextAiringEpisode = (
|
||||||
await import("~/libs/maybeScheduleNextAiringEpisode")
|
await import("~/libs/maybeScheduleNextAiringEpisode")
|
||||||
@@ -119,34 +109,6 @@ describe("requests the /watch-status route", () => {
|
|||||||
expect(res.status).toBe(500);
|
expect(res.status).toBe(500);
|
||||||
});
|
});
|
||||||
|
|
||||||
it("saving title, Anilist request fails, should succeed", async () => {
|
|
||||||
vi.mocked(maybeUpdateWatchStatusOnAnilist).mockRejectedValue(
|
|
||||||
new Error("Anilist failed"),
|
|
||||||
);
|
|
||||||
|
|
||||||
const res = await app.request(
|
|
||||||
"/watch-status",
|
|
||||||
{
|
|
||||||
method: "POST",
|
|
||||||
headers: new Headers({
|
|
||||||
"x-anilist-token": "asd",
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
}),
|
|
||||||
body: JSON.stringify({
|
|
||||||
deviceId: "123",
|
|
||||||
watchStatus: "CURRENT",
|
|
||||||
titleId: -1,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
env,
|
|
||||||
);
|
|
||||||
|
|
||||||
await expect(res.json()).resolves.toEqual({ success: true });
|
|
||||||
expect(res.status).toBe(200);
|
|
||||||
// Should queue task if direct update fails
|
|
||||||
expect(queueTask).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
it("watch status is null, should succeed", async () => {
|
it("watch status is null, should succeed", async () => {
|
||||||
const res = await app.request(
|
const res = await app.request(
|
||||||
"/watch-status",
|
"/watch-status",
|
||||||
|
@@ -1,5 +1,4 @@
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
-import type { HonoRequest } from "hono";

import { AnilistUpdateType } from "~/libs/anilist/updateType.ts";
import { maybeScheduleNextAiringEpisode } from "~/libs/maybeScheduleNextAiringEpisode";
@@ -16,15 +15,12 @@ import {
} from "~/types/schema";
import { WatchStatus } from "~/types/title/watchStatus";

-import { maybeUpdateWatchStatusOnAnilist } from "./anilist";
-
const app = new OpenAPIHono<Cloudflare.Env>();

const UpdateWatchStatusRequest = z.object({
  deviceId: z.string(),
  watchStatus: WatchStatus.nullable(),
  titleId: AniListIdSchema,
-  isRetrying: z.boolean().optional().default(false),
});

const route = createRoute({
@@ -66,7 +62,6 @@ const route = createRoute({
});

export async function updateWatchStatus(
-  req: HonoRequest,
  deviceId: string,
  titleId: number,
  watchStatus: WatchStatus | null,
@@ -84,14 +79,8 @@ export async function updateWatchStatus(
}

app.openapi(route, async (c) => {
-  const {
-    deviceId,
-    watchStatus,
-    titleId,
-    isRetrying = false,
-  } = await c.req.json<typeof UpdateWatchStatusRequest._type>();
-  const aniListToken = c.req.header("X-AniList-Token");
-
+  const { deviceId, watchStatus, titleId } =
+    await c.req.json<typeof UpdateWatchStatusRequest._type>();
  // Check if we should use mock data
  const { useMockData } = await import("~/libs/useMockData");
  if (useMockData()) {
@@ -99,35 +88,24 @@ app.openapi(route, async (c) => {
    return c.json(SuccessResponse, { status: 200 });
  }

-  if (!isRetrying) {
-    try {
-      await updateWatchStatus(c.req, deviceId, titleId, watchStatus);
-    } catch (error) {
-      console.error("Error setting watch status");
-      console.error(error);
-      return c.json(ErrorResponse, { status: 500 });
-    }
+  try {
+    await updateWatchStatus(deviceId, titleId, watchStatus);
+  } catch (error) {
+    console.error("Error setting watch status");
+    console.error(error);
+    return c.json(ErrorResponse, { status: 500 });
  }

-  try {
-    await maybeUpdateWatchStatusOnAnilist(
-      Number(titleId),
-      watchStatus,
-      aniListToken,
-    );
-  } catch (error) {
-    console.error("Failed to update watch status on Anilist");
-    console.error(error);
-    if (isRetrying) {
-      return c.json(ErrorResponse, { status: 500 });
-    }
-
+  const aniListToken = c.req.header("X-AniList-Token");
+  if (aniListToken) {
    await queueTask(
      "ANILIST_UPDATES",
      {
-        deviceId,
-        watchStatus,
-        titleId,
+        [AnilistUpdateType.UpdateWatchStatus]: {
+          aniListToken,
+          titleId,
+          watchStatus,
+        },
        updateType: AnilistUpdateType.UpdateWatchStatus,
      },
      { req: c.req, scheduleConfig: { delay: { minute: 1 } } },
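With this change the watch-status route saves the status locally first and, when an `X-AniList-Token` header is present, defers the AniList write to the `ANILIST_UPDATES` queue. A minimal sketch of the message shape it enqueues, assuming the `QueueBody` and `AnilistUpdateType` definitions introduced later in this diff (the id, status, and token values are placeholders):

```ts
import { AnilistUpdateType } from "~/libs/anilist/updateType";
import type { QueueBody } from "~/libs/tasks/queueTask";

// Placeholder values for illustration only.
const body: QueueBody["ANILIST_UPDATES"] = {
  updateType: AnilistUpdateType.UpdateWatchStatus,
  [AnilistUpdateType.UpdateWatchStatus]: {
    titleId: 21,
    watchStatus: null, // or a WatchStatus value such as "CURRENT"
    aniListToken: "<anilist token>",
  },
};
```

The queue consumer in `src/index.ts` (next file) unwraps this payload and calls `updateWatchStatusOnAnilist` with it.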
120  src/index.ts
@@ -1,11 +1,18 @@
import { swaggerUI } from "@hono/swagger-ui";
import { OpenAPIHono } from "@hono/zod-openapi";
+import { Duration, type DurationLike } from "luxon";

-import { maybeUpdateLastConnectedAt } from "~/controllers/maybeUpdateLastConnectedAt";
+import { onNewEpisode } from "~/controllers/internal/new-episode";
+import { AnilistUpdateType } from "~/libs/anilist/updateType";
+import { calculateExponentialBackoff } from "~/libs/calculateExponentialBackoff";
import type { QueueName } from "~/libs/tasks/queueName.ts";
+import {
+  MAX_QUEUE_DELAY_SECONDS,
+  type QueueBody,
+} from "~/libs/tasks/queueTask";
+import { maybeUpdateLastConnectedAt } from "~/middleware/maybeUpdateLastConnectedAt";

-import { onNewEpisode } from "./controllers/internal/new-episode";
-import type { QueueBody } from "./libs/tasks/queueTask";
+import { checkUpcomingTitles } from "./controllers/internal/upcoming-titles";

export const app = new OpenAPIHono<{ Bindings: Env }>();

@@ -72,28 +79,101 @@ app.get("/docs", swaggerUI({ url: "/openapi.json" }));
export default {
  fetch: app.fetch,
  async queue(batch) {
-    switch (batch.queue as QueueName) {
-      case "ANILIST_UPDATES":
-        batch.retryAll();
-        break;
-      case "NEW_EPISODE":
-        for (const message of (batch as MessageBatch<QueueBody["NEW_EPISODE"]>)
-          .messages) {
+    onMessageQueue(batch, async (message, queueName) => {
+      switch (queueName) {
+        case "ANILIST_UPDATES":
+          const anilistUpdateBody =
+            message.body as QueueBody["ANILIST_UPDATES"];
+          console.log("queue run", message.body);
+          switch (anilistUpdateBody.updateType) {
+            case AnilistUpdateType.UpdateWatchStatus:
+              if (!anilistUpdateBody[AnilistUpdateType.UpdateWatchStatus]) {
+                console.error(
+                  `Discarding update, unknown body ${JSON.stringify(message.body)}`,
+                );
+                return;
+              }
+
+              const { updateWatchStatusOnAnilist } =
+                await import("~/controllers/watch-status/anilist");
+              const payload =
+                anilistUpdateBody[AnilistUpdateType.UpdateWatchStatus];
+              await updateWatchStatusOnAnilist(
+                payload.titleId,
+                payload.watchStatus,
+                payload.aniListToken,
+              );
+              break;
+            default:
+              throw new Error(
+                `Unhandled update type: ${anilistUpdateBody.updateType}`,
+              );
+          }
+          break;
+        case "NEW_EPISODE":
+          const newEpisodeBody = message.body as QueueBody["NEW_EPISODE"];
          await onNewEpisode(
-            message.body.aniListId,
-            message.body.episodeNumber,
+            newEpisodeBody.aniListId,
+            newEpisodeBody.episodeNumber,
          );
-          message.ack();
-        }
-        break;
+          break;
+        default:
+          throw new Error(`Unhandled queue name: ${queueName}`);
      }
+    });
  },
  async scheduled(event, env, ctx) {
-    const { processDelayedTasks } = await import(
-      "~/libs/tasks/processDelayedTasks"
-    );
-    await processDelayedTasks(env, ctx);
+    switch (event.cron) {
+      case "0 */12 * * *":
+        const { processDelayedTasks } =
+          await import("~/libs/tasks/processDelayedTasks");
+        await processDelayedTasks(env);
+        break;
+      case "0 18 * * *":
+        const { checkUpcomingTitles } =
+          await import("~/controllers/internal/upcoming-titles");
+        await checkUpcomingTitles();
+        break;
+      default:
+        throw new Error(`Unhandled cron: ${event.cron}`);
+    }
  },
} satisfies ExportedHandler<Env>;

+const retryDelayConfig: Partial<
+  Record<QueueName, { min: DurationLike; max: DurationLike }>
+> = {
+  NEW_EPISODE: {
+    min: Duration.fromObject({ hours: 1 }),
+    max: Duration.fromObject({ hours: 12 }),
+  },
+};
+
+function onMessageQueue<QN extends QueueName>(
+  messageBatch: MessageBatch<unknown>,
+  callback: (message: Message<QueueBody[QN]>, queueName: QN) => void,
+) {
+  for (const message of messageBatch.messages) {
+    try {
+      callback(message as Message<QueueBody[QN]>, messageBatch.queue as QN);
+      message.ack();
+    } catch (error) {
+      console.error(
+        `Failed to process message ${message.id} for queue ${messageBatch.queue} with body ${JSON.stringify(message.body)}`,
+      );
+      console.error(error);
+      message.retry({
+        delaySeconds: Math.min(
+          calculateExponentialBackoff({
+            attempt: message.attempts,
+            baseMin: retryDelayConfig[messageBatch.queue as QN]?.min,
+            absCap: retryDelayConfig[messageBatch.queue as QN]?.max,
+          }),
+          MAX_QUEUE_DELAY_SECONDS,
+        ),
+      });
+    }
+  }
+}
+
export { AnilistDurableObject as AnilistDo } from "~/libs/anilist/anilist-do.ts";
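The `onMessageQueue` wrapper above acks each message it handles and, when the callback throws, retries it with a delay that grows exponentially but never exceeds `MAX_QUEUE_DELAY_SECONDS`. A rough sketch of that delay calculation, using the helpers added in this diff (the attempt number and durations are illustrative, mirroring `retryDelayConfig.NEW_EPISODE`):

```ts
import { Duration } from "luxon";

import { calculateExponentialBackoff } from "~/libs/calculateExponentialBackoff";
import { MAX_QUEUE_DELAY_SECONDS } from "~/libs/tasks/queueTask";

// e.g. a NEW_EPISODE message on its 3rd attempt
const delaySeconds = Math.min(
  calculateExponentialBackoff({
    attempt: 3,
    baseMin: Duration.fromObject({ hours: 1 }),
    absCap: Duration.fromObject({ hours: 12 }),
  }),
  MAX_QUEUE_DELAY_SECONDS, // keep within the 12-hour ceiling used for queue delays
);
// delaySeconds is then passed to message.retry({ delaySeconds })
```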
@@ -1,6 +1,7 @@
import type { TypedDocumentNode } from "@graphql-typed-document-node/core";
import { DurableObject } from "cloudflare:workers";
import { print } from "graphql";
+import { DateTime } from "luxon";
import { z } from "zod";

import {
@@ -8,6 +9,7 @@ import {
  GetNextEpisodeAiringAtQuery,
  GetPopularTitlesQuery,
  GetTitleQuery,
+  GetTitleUserDataQuery,
  GetTrendingTitlesQuery,
  GetUpcomingTitlesQuery,
  GetUserProfileQuery,
@@ -18,6 +20,7 @@ import {
  SearchQuery,
} from "~/libs/anilist/queries";
import { sleep } from "~/libs/sleep.ts";
+import type { Title } from "~/types/title";

const nextAiringEpisodeSchema = z.nullable(
  z.object({
@@ -38,30 +41,54 @@ export class AnilistDurableObject extends DurableObject {
    return new Response("Not found", { status: 404 });
  }

-  async getTitle(id: number, token?: string) {
-    return this.handleCachedRequest(
-      `title:${id}`,
-      async () => {
-        const anilistResponse = await this.fetchFromAnilist(
-          GetTitleQuery,
-          { id },
-          token,
-        );
-        return anilistResponse?.Media ?? null;
-      },
-      (media) => {
-        if (!media) return undefined;
-        // Cast to any to access fragment fields without unmasking
-        const nextAiringEpisode = nextAiringEpisodeSchema.parse(
-          (media as any)?.nextAiringEpisode,
-        );
-        const airingAt = (nextAiringEpisode?.airingAt ?? 0) * 1000;
-        if (airingAt) {
-          return airingAt - Date.now();
-        }
-        return undefined;
-      },
+  async getTitle(
+    id: number,
+    userId?: number,
+    token?: string,
+  ): Promise<Title | null> {
+    const promises: Promise<any>[] = [
+      this.handleCachedRequest(
+        `title:${id}`,
+        async () => {
+          const anilistResponse = await this.fetchFromAnilist(GetTitleQuery, {
+            id,
+          });
+          return anilistResponse?.Media ?? null;
+        },
+        (media) => {
+          if (!media) return undefined;
+
+          // Cast to any to access fragment fields without unmasking
+          const nextAiringEpisode = nextAiringEpisodeSchema.parse(
+            (media as any)?.nextAiringEpisode,
+          );
+          return nextAiringEpisode?.airingAt
+            ? DateTime.fromMillis(nextAiringEpisode?.airingAt)
+            : undefined;
+        },
+      ),
+    ];
+    promises.push(
+      userId
+        ? this.handleCachedRequest(
+            `title:${id}:${userId}`,
+            async () => {
+              const anilistResponse = await this.fetchFromAnilist(
+                GetTitleUserDataQuery,
+                { id },
+                { token },
+              );
+              return anilistResponse?.Media ?? null;
+            },
+            DateTime.now().plus({ days: 1 }),
+          )
+        : Promise.resolve({ mediaListEntry: null }),
    );
+
+    return Promise.all(promises).then(([title, userTitle]) => ({
+      ...title,
+      ...userTitle,
+    }));
  }

  async getNextEpisodeAiringAt(id: number) {
@@ -73,7 +100,7 @@ export class AnilistDurableObject extends DurableObject {
        });
        return data?.Media;
      },
-      60 * 60 * 1000,
+      DateTime.now().plus({ hours: 1 }),
    );
  }

@@ -88,7 +115,7 @@ export class AnilistDurableObject extends DurableObject {
        });
        return data?.Page;
      },
-      60 * 60 * 1000,
+      DateTime.now().plus({ hours: 1 }),
    );
  }

@@ -101,8 +128,7 @@ export class AnilistDurableObject extends DurableObject {
  ) {
    return this.handleCachedRequest(
      `popular:${JSON.stringify({ season, seasonYear, nextSeason, nextYear, limit })}`,
-      async () => {
-        console.log(nextSeason, nextYear, print(BrowsePopularQuery));
+      () => {
        return this.fetchFromAnilist(BrowsePopularQuery, {
          season,
          seasonYear,
@@ -111,21 +137,27 @@ export class AnilistDurableObject extends DurableObject {
          limit,
        });
      },
-      24 * 60 * 60 * 1000,
+      DateTime.now().plus({ days: 1 }),
    );
  }

-  async nextSeasonPopular(nextSeason: any, nextYear: number, limit: number) {
+  async nextSeasonPopular(
+    nextSeason: any,
+    nextYear: number,
+    page: number,
+    limit: number,
+  ) {
    return this.handleCachedRequest(
-      `next_season:${JSON.stringify({ nextSeason, nextYear, limit })}`,
+      `next_season:${JSON.stringify({ nextSeason, nextYear, page, limit })}`,
      async () => {
        return this.fetchFromAnilist(NextSeasonPopularQuery, {
          nextSeason,
          nextYear,
          limit,
-        });
+          page,
+        }).then((data) => data?.Page);
      },
-      24 * 60 * 60 * 1000,
+      DateTime.now().plus({ days: 1 }),
    );
  }

@@ -138,15 +170,14 @@ export class AnilistDurableObject extends DurableObject {
    return this.handleCachedRequest(
      `popular:${JSON.stringify({ page, limit, season, seasonYear })}`,
      async () => {
-        const data = await this.fetchFromAnilist(GetPopularTitlesQuery, {
+        return this.fetchFromAnilist(GetPopularTitlesQuery, {
          page,
          limit,
          season,
          seasonYear,
-        });
-        return data?.Page;
+        }).then((data) => data?.Page);
      },
-      24 * 60 * 60 * 1000,
+      DateTime.now().plus({ days: 1 }),
    );
  }

@@ -160,7 +191,7 @@ export class AnilistDurableObject extends DurableObject {
        });
        return data?.Page;
      },
-      24 * 60 * 60 * 1000,
+      DateTime.now().plus({ days: 1 }),
    );
  }

@@ -179,7 +210,7 @@ export class AnilistDurableObject extends DurableObject {
        });
        return data?.Page;
      },
-      24 * 60 * 60 * 1000,
+      DateTime.now().plus({ days: 1 }),
    );
  }

@@ -187,10 +218,10 @@ export class AnilistDurableObject extends DurableObject {
    return this.handleCachedRequest(
      `user:${token}`,
      async () => {
-        const data = await this.fetchFromAnilist(GetUserQuery, {}, token);
+        const data = await this.fetchFromAnilist(GetUserQuery, {}, { token });
        return data?.Viewer;
      },
-      60 * 60 * 24 * 30 * 1000,
+      DateTime.now().plus({ days: 30 }),
    );
  }

@@ -201,11 +232,11 @@ export class AnilistDurableObject extends DurableObject {
        const data = await this.fetchFromAnilist(
          GetUserProfileQuery,
          { token },
-          token,
+          { token },
        );
        return data?.Viewer;
      },
-      60 * 60 * 24 * 30 * 1000,
+      DateTime.now().plus({ days: 30 }),
    );
  }

@@ -217,7 +248,7 @@ export class AnilistDurableObject extends DurableObject {
    const data = await this.fetchFromAnilist(
      MarkEpisodeAsWatchedMutation,
      { titleId, episodeNumber },
-      token,
+      { token },
    );
    return data?.SaveMediaListEntry;
  }
@@ -226,7 +257,7 @@ export class AnilistDurableObject extends DurableObject {
    const data = await this.fetchFromAnilist(
      MarkTitleAsWatchedMutation,
      { titleId },
-      token,
+      { token },
    );
    return data?.SaveMediaListEntry;
  }
@@ -235,7 +266,7 @@ export class AnilistDurableObject extends DurableObject {
  async handleCachedRequest<T>(
    key: string,
    fetcher: () => Promise<T>,
-    ttl?: number | ((data: T) => number | undefined),
+    ttl?: DateTime | ((data: T) => DateTime | undefined),
  ) {
    const cache = await this.state.storage.get(key);
    console.debug(`Retrieving request ${key} from cache:`, cache != null);
@@ -247,9 +278,8 @@ export class AnilistDurableObject extends DurableObject {
    await this.state.storage.put(key, result);

    const calculatedTtl = typeof ttl === "function" ? ttl(result) : ttl;
-
-    if (calculatedTtl && calculatedTtl > 0) {
-      const alarmTime = Date.now() + calculatedTtl;
+    if (calculatedTtl) {
+      const alarmTime = calculatedTtl.toMillis();
      await this.state.storage.setAlarm(alarmTime);
      await this.state.storage.put(`alarm:${key}`, alarmTime);
    }
@@ -260,11 +290,13 @@ export class AnilistDurableObject extends DurableObject {
  async alarm() {
    const now = Date.now();
    const alarms = await this.state.storage.list({ prefix: "alarm:" });
+    console.debug(`Retrieved alarms from cache:`, Object.entries(alarms));
    for (const [key, ttl] of Object.entries(alarms)) {
      if (now >= ttl) {
        // The key in alarms is `alarm:${storageKey}`
        // We want to delete the storageKey
        const storageKey = key.replace("alarm:", "");
+        console.debug(`Deleting storage key ${storageKey} & alarm ${key}`);
        await this.state.storage.delete(storageKey);
        await this.state.storage.delete(key);
      }
@@ -272,10 +304,13 @@ export class AnilistDurableObject extends DurableObject {
  }

  async fetchFromAnilist<Result = any, Variables = any>(
-    queryString: string,
+    query: TypedDocumentNode<Result, Variables>,
    variables: Variables,
-    token?: string | undefined,
-  ): Promise<Result> {
+    {
+      token,
+      shouldRetryOnRateLimit = true,
+    }: { token?: string | undefined; shouldRetryOnRateLimit?: boolean } = {},
+  ): Promise<Result | undefined> {
    const headers: any = {
      "Content-Type": "application/json",
    };
@@ -286,7 +321,7 @@ export class AnilistDurableObject extends DurableObject {

    // Use the query passed in, or fallback if needed (though we expect it to be passed)
    // We print the query to string
-    // const queryString = print(query);
+    const queryString = print(query);

    const response = await fetch(`${this.env.PROXY_URL}/proxy`, {
      method: "POST",
@@ -305,14 +340,17 @@ export class AnilistDurableObject extends DurableObject {
    });

    // 1. Handle Rate Limiting (429)
-    if (response.status === 429) {
+    if (shouldRetryOnRateLimit && response.status === 429) {
      const retryAfter = await response
-        .json()
+        .json<{ headers: Record<string, string> }>()
        .then(({ headers }) => new Headers(headers).get("Retry-After"));
      console.log("429, retrying in", retryAfter);

      await sleep(Number(retryAfter || 1) * 1000); // specific fallback or ensure logic
-      return this.fetchFromAnilist(query, variables, token);
+      return this.fetchFromAnilist(query, variables, {
+        token,
+        shouldRetryOnRateLimit: false,
+      });
    }

    // 2. Handle HTTP Errors (like 404 or 500)
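The Durable Object now caches with absolute luxon `DateTime` expiries instead of relative millisecond TTLs, and sets its alarm to `ttl.toMillis()`. A minimal sketch of the pattern, assuming Cloudflare's `DurableObjectStorage` API (the `storage` handle stands in for `this.state.storage`):

```ts
import { DateTime } from "luxon";

declare const storage: DurableObjectStorage; // stand-in for this.state.storage

async function cacheWithExpiry(key: string, value: unknown, expiresAt: DateTime) {
  await storage.put(key, value);
  const alarmTime = expiresAt.toMillis(); // absolute epoch millis, not an offset
  await storage.setAlarm(alarmTime);      // alarm() later deletes the expired entry
  await storage.put(`alarm:${key}`, alarmTime);
}

// e.g. await cacheWithExpiry("popular:…", data, DateTime.now().plus({ days: 1 }));
```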
@@ -5,10 +5,11 @@ import type { Title } from "~/types/title";

export async function fetchTitleFromAnilist(
  id: number,
+  userId?: number | undefined,
  token?: string | undefined,
): Promise<Title | undefined> {
  if (useMockData()) {
-    const { mockTitleDetails } = await import("~/mocks/mockData");
+    const { mockTitleDetails } = await import("~/mocks");
    return mockTitleDetails();
  }

@@ -17,8 +18,7 @@ export async function fetchTitleFromAnilist(
  );
  const stub = env.ANILIST_DO.get(durableObjectId);

-  const data = await stub.getTitle(id, token);
-
+  const data = await stub.getTitle(id, userId, token);
  if (!data) {
    return undefined;
  }
@@ -14,6 +14,18 @@ export const GetTitleQuery = graphql(
  [MediaFragment],
);

+export const GetTitleUserDataQuery = graphql(`
+  query GetTitleUserData($id: Int!) {
+    Media(id: $id) {
+      mediaListEntry {
+        id
+        progress
+        status
+      }
+    }
+  }
+`);
+
export const SearchQuery = graphql(
  `
    query Search($query: String!, $page: Int!, $limit: Int!) {
@@ -247,8 +259,9 @@ export const NextSeasonPopularQuery = graphql(
      $nextSeason: MediaSeason
      $nextYear: Int
      $limit: Int!
+      $page: Int!
    ) {
-      Page(page: 1, perPage: $limit) {
+      Page(page: $page, perPage: $limit) {
        media(
          season: $nextSeason
          seasonYear: $nextYear
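Taken together with the Durable Object change above, `fetchTitleFromAnilist` now threads a `userId` through so the viewer's list entry can be fetched with `GetTitleUserDataQuery` and merged into the cached title. A hedged call sketch against the new signature (ids are placeholders; the declaration only restates the signature from this diff):

```ts
import type { Title } from "~/types/title";

// Restates the signature changed in this diff; the real body lives in the repo.
declare function fetchTitleFromAnilist(
  id: number,
  userId?: number | undefined,
  token?: string | undefined,
): Promise<Title | undefined>;

const title = await fetchTitleFromAnilist(21 /* example id */, 1234 /* example user id */);
// With a userId, the result is merged with the viewer's
// mediaListEntry ({ id, progress, status } | null).
```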
53  src/libs/calculateExponentialBackoff.ts  Normal file
@@ -0,0 +1,53 @@
+import { Duration, type DurationLike } from "luxon";
+
+interface CalculateExponentialBackoffOptions {
+  attempt: number;
+  baseMin?: DurationLike;
+  absCap?: DurationLike;
+  fuzzFactor?: number;
+}
+
+/**
+ * Generates a backoff time where both the Minimum floor and Maximum ceiling
+ * are "fuzzed" with jitter to prevent clustering at the edges.
+ *
+ * @param attempt - The current retry attempt (0-indexed).
+ * @param baseMin - The nominal minimum wait time (default: 1s).
+ * @param absCap - The absolute maximum wait time (default: 60s).
+ * @param fuzzFactor - How much to wobble the edges (0.1 = +/- 10%).
+ *
+ * @returns A random duration between the nominal minimum and maximum, in seconds.
+ */
+export function calculateExponentialBackoff({
+  attempt,
+  baseMin: baseMinDuration = Duration.fromObject({ minutes: 1 }),
+  absCap: absCapDuration = Duration.fromObject({ hours: 1 }),
+  fuzzFactor = 0.2,
+}: CalculateExponentialBackoffOptions): number {
+  const baseMin = Duration.fromDurationLike(baseMinDuration).as("seconds");
+  const absCap = Duration.fromDurationLike(absCapDuration).as("seconds");
+
+  // 1. Calculate nominal boundaries
+  // Example: If baseMin is 1s, the nominal boundaries are 1s, 2s, 4s, 8s... (The 'ceiling' grows exponentially)
+  const nominalMin = baseMin;
+  const nominalCeiling = Math.min(baseMin * Math.pow(2, attempt), absCap);
+
+  // 2. Fuzz the Min (The Floor)
+  // Example: If min is 1s and fuzz is 0.2, the floor becomes random between 0.8s and 1.2s
+  const minFuzz = nominalMin * fuzzFactor;
+  const fuzzedMin = nominalMin + (Math.random() * 2 * minFuzz - minFuzz);
+
+  // 3. Fuzz the Max (The Ceiling)
+  // Example: If ceiling is 4s (and fuzz is 0.2), it becomes random between 3.2s and 4.8s
+  const maxFuzz = nominalCeiling * fuzzFactor;
+  const fuzzedCeiling =
+    nominalCeiling + (Math.random() * 2 * maxFuzz - maxFuzz);
+
+  // Safety: Ensure we don't return a negative number or cross boundaries weirdly
+  // (e.g. if fuzz makes min > max, we swap or clamp)
+  const safeMin = Math.max(0, fuzzedMin);
+  const safeMax = Math.max(safeMin, fuzzedCeiling);
+
+  // 4. Return random value in the new fuzzy range
+  return safeMin + Math.random() * (safeMax - safeMin);
+}
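A short usage sketch for the new helper; the loop just prints the kind of values the queue consumer in `src/index.ts` passes to `message.retry` (the defaults are spelled out explicitly):

```ts
import { Duration } from "luxon";

import { calculateExponentialBackoff } from "~/libs/calculateExponentialBackoff";

for (let attempt = 0; attempt < 4; attempt++) {
  const seconds = calculateExponentialBackoff({
    attempt,
    baseMin: Duration.fromObject({ minutes: 1 }),
    absCap: Duration.fromObject({ hours: 1 }),
    fuzzFactor: 0.2,
  });
  // Nominal ceilings: 60s, 120s, 240s, 480s (capped at 3600s),
  // each fuzzed by ±20% before a random value in the range is drawn.
  console.log(`attempt ${attempt}: wait ~${Math.round(seconds)}s`);
}
```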
@@ -3,11 +3,13 @@ import mapKeys from "lodash.mapkeys";

import { Case, changeStringCase } from "../changeStringCase";

-export function getAdminSdkCredentials(env: Cloudflare.Env = cloudflareEnv) {
+export function getAdminSdkCredentials(
+  env: Cloudflare.Env = cloudflareEnv,
+): AdminSdkCredentials {
  return mapKeys(
    JSON.parse(env.ADMIN_SDK_JSON) as AdminSdkCredentials,
    (_, key) => changeStringCase(key, Case.snake_case, Case.camelCase),
-  );
+  ) satisfies AdminSdkCredentials;
}

export interface AdminSdkCredentials {
@@ -1,204 +1,158 @@
+import { env } from "cloudflare:test";
+import { DateTime } from "luxon";
import { beforeEach, describe, expect, it, vi } from "vitest";

+import { getTestEnv } from "../test/getTestEnv";
import { processDelayedTasks } from "./processDelayedTasks";

describe("processDelayedTasks", () => {
-  let mockEnv: Cloudflare.Env;
-  let mockCtx: ExecutionContext;
-  let kvGetSpy: ReturnType<typeof vi.fn>;
-  let kvDeleteSpy: ReturnType<typeof vi.fn>;
-  let kvPutSpy: ReturnType<typeof vi.fn>;
-  let queueSendSpy: ReturnType<typeof vi.fn>;
-
-  beforeEach(() => {
-    kvGetSpy = vi.fn(() => Promise.resolve(null));
-    kvDeleteSpy = vi.fn(() => Promise.resolve());
-    kvPutSpy = vi.fn(() => Promise.resolve());
-    queueSendSpy = vi.fn(() => Promise.resolve());
-
-    mockEnv = {
-      DELAYED_TASKS: {
-        get: kvGetSpy,
-        delete: kvDeleteSpy,
-        put: kvPutSpy,
-        list: vi.fn(() =>
-          Promise.resolve({
-            keys: [],
-            list_complete: true as const,
-            cacheStatus: null,
-          }),
-        ),
-        getWithMetadata: vi.fn(() =>
-          Promise.resolve({ value: null, metadata: null }),
-        ),
-      } as any,
-      NEW_EPISODE: {
-        send: queueSendSpy,
-      } as any,
-      ANILIST_UPDATES: {
-        send: vi.fn(() => Promise.resolve()),
-      } as any,
-    } as any;
-
-    mockCtx = {
-      waitUntil: vi.fn(() => {}),
-      passThroughOnException: vi.fn(() => {}),
-    } as any;
+  beforeEach(async () => {
+    const tasksToDelete = await env.DELAYED_TASKS.list({
+      prefix: "delayed-task:",
+    });
+    console.log(`Found ${tasksToDelete.keys.length} tasks to delete`);
+    for (const task of tasksToDelete.keys) {
+      await env.DELAYED_TASKS.delete(task.name);
+    }
  });

  it("handles empty KV namespace", async () => {
-    await processDelayedTasks(mockEnv, mockCtx);
+    await processDelayedTasks(env);

-    expect(kvDeleteSpy).not.toHaveBeenCalled();
-    expect(queueSendSpy).not.toHaveBeenCalled();
+    await expect(
+      env.DELAYED_TASKS.list({ prefix: "delayed-task:" }).then(
+        (result) => result.keys,
+      ),
+    ).resolves.toHaveLength(0);
  });

-  it("queues tasks within 12 hours of scheduled time", async () => {
-    const now = Math.floor(Date.now() / 1000);
-    const scheduledTime = now + 6 * 3600; // 6 hours from now
+  it("queues tasks within 9 hours of scheduled time", async () => {
+    const now = DateTime.now();
+    const scheduledTime = now.plus({ hours: 6 }).toSeconds();

    const taskMetadata = {
      queueName: "NEW_EPISODE",
      body: { aniListId: 123, episodeNumber: 1 },
      headers: { "Content-Type": "application/json" },
      scheduledEpochTime: scheduledTime,
      taskId: "task-1",
-      createdAt: now - 18 * 3600,
+      createdAt: now.minus({ hours: 18 }).toSeconds(),
      retryCount: 0,
    };
-
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: `delayed-task:${scheduledTime}:task-1` }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
-    );
-
-    kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
-
-    await processDelayedTasks(mockEnv, mockCtx);
-
-    expect(queueSendSpy).toHaveBeenCalledTimes(1);
-    expect(kvDeleteSpy).toHaveBeenCalledTimes(1);
-    expect(kvDeleteSpy).toHaveBeenCalledWith(
+    await env.DELAYED_TASKS.put(
      `delayed-task:${scheduledTime}:task-1`,
+      JSON.stringify(taskMetadata),
    );
+
+    await processDelayedTasks(env);
+
+    await expect(
+      env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-1`),
+    ).resolves.toBeNull();
  });

-  it("does not queue tasks beyond 12 hours", async () => {
-    const now = Math.floor(Date.now() / 1000);
-    const scheduledTime = now + 24 * 3600; // 24 hours from now
+  it("does not queue tasks beyond 9 hours", async () => {
+    const now = DateTime.now();
+    const scheduledTime = now.plus({ hours: 24 }).toSeconds();

    const taskMetadata = {
      queueName: "NEW_EPISODE",
      body: { aniListId: 456, episodeNumber: 2 },
      headers: { "Content-Type": "application/json" },
      scheduledEpochTime: scheduledTime,
      taskId: "task-2",
-      createdAt: now,
+      createdAt: now.toSeconds(),
      retryCount: 0,
    };
-
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: `delayed-task:${scheduledTime}:task-2` }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
+    await env.DELAYED_TASKS.put(
+      `delayed-task:${scheduledTime}:task-2`,
+      JSON.stringify(taskMetadata),
    );

-    kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
+    await processDelayedTasks(env);

-    await processDelayedTasks(mockEnv, mockCtx);
-
-    expect(queueSendSpy).not.toHaveBeenCalled();
-    expect(kvDeleteSpy).not.toHaveBeenCalled();
+    await expect(
+      env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-2`),
+    ).resolves.toBeTruthy();
  });

  it("increments retry count on queue failure", async () => {
-    const now = Math.floor(Date.now() / 1000);
-    const scheduledTime = now + 1 * 3600; // 1 hour from now
+    const now = DateTime.now();
+    const scheduledTime = now.plus({ hours: 1 }).toSeconds();

    const taskMetadata = {
      queueName: "NEW_EPISODE",
      body: { aniListId: 789, episodeNumber: 3 },
      headers: { "Content-Type": "application/json" },
      scheduledEpochTime: scheduledTime,
      taskId: "task-3",
-      createdAt: now - 23 * 3600,
+      createdAt: now.minus({ hours: 23 }).toSeconds(),
      retryCount: 0,
    };
-
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: `delayed-task:${scheduledTime}:task-3` }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
+    const mockEnv = getTestEnv({
+      NEW_EPISODE: {
+        send: vi.fn().mockRejectedValue(new Error("Queue error")),
+        sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
+      },
+    });
+    await mockEnv.DELAYED_TASKS.put(
+      `delayed-task:${scheduledTime}:task-3`,
+      JSON.stringify(taskMetadata),
    );

-    kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
-    queueSendSpy.mockRejectedValue(new Error("Queue error"));
+    await processDelayedTasks(mockEnv);

-    await processDelayedTasks(mockEnv, mockCtx);
-
-    expect(kvPutSpy).toHaveBeenCalledTimes(1);
-    const updatedMetadata = JSON.parse(kvPutSpy.mock.calls[0][1]);
+    const updatedMetadata = JSON.parse(
+      (await mockEnv.DELAYED_TASKS.get(
+        `delayed-task:${scheduledTime}:task-3`,
+      ))!,
+    );
    expect(updatedMetadata.retryCount).toBe(1);
-    expect(kvDeleteSpy).not.toHaveBeenCalled();
  });

  it("logs alert after 3 failed attempts", async () => {
    const consoleErrorSpy = vi.fn(() => {});
    const originalConsoleError = console.error;
    console.error = consoleErrorSpy as any;
-    const now = Math.floor(Date.now() / 1000);
-    const scheduledTime = now + 1 * 3600;
+    const now = DateTime.now();
+    const scheduledTime = now.plus({ hours: 1 }).toSeconds();

    const taskMetadata = {
      queueName: "NEW_EPISODE",
-      body: { aniListId: 999, episodeNumber: 4 },
+      body: { aniListId: 789, episodeNumber: 4 },
      headers: { "Content-Type": "application/json" },
      scheduledEpochTime: scheduledTime,
      taskId: "task-4",
-      createdAt: now - 23 * 3600,
-      retryCount: 2, // Will become 3 after this failure
+      createdAt: now.minus({ hours: 23 }).toSeconds(),
+      retryCount: 2,
    };
-
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: `delayed-task:${scheduledTime}:task-4` }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
+    const mockEnv = getTestEnv({
+      NEW_EPISODE: {
+        send: vi.fn().mockRejectedValue(new Error("Queue error")),
+        sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
+      },
+    });
+    await mockEnv.DELAYED_TASKS.put(
+      `delayed-task:${scheduledTime}:task-4`,
+      JSON.stringify(taskMetadata),
    );

-    kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
-    queueSendSpy.mockRejectedValue(new Error("Queue error"));
+    await processDelayedTasks(mockEnv);

-    await processDelayedTasks(mockEnv, mockCtx);
-
    // Check that alert was logged
    const alertCalls = consoleErrorSpy.mock.calls.filter((call: any) =>
      call[0]?.includes("🚨 ALERT"),
    );
    expect(alertCalls.length).toBeGreaterThan(0);

    console.error = originalConsoleError;
  });

  it("handles multiple tasks in single cron run", async () => {
-    const now = Math.floor(Date.now() / 1000);
+    const now = DateTime.now();

    const task1Metadata = {
      queueName: "NEW_EPISODE",
      body: { aniListId: 100, episodeNumber: 1 },
      headers: { "Content-Type": "application/json" },
-      scheduledEpochTime: now + 2 * 3600,
+      scheduledEpochTime: now.plus({ hours: 2 }).toSeconds(),
      taskId: "task-1",
-      createdAt: now - 20 * 3600,
+      createdAt: now.minus({ hours: 20 }).toSeconds(),
      retryCount: 0,
    };

@@ -206,47 +160,53 @@ describe("processDelayedTasks", () => {
      queueName: "NEW_EPISODE",
      body: { aniListId: 200, episodeNumber: 2 },
      headers: { "Content-Type": "application/json" },
-      scheduledEpochTime: now + 5 * 3600,
+      scheduledEpochTime: now.plus({ hours: 5 }).toSeconds(),
      taskId: "task-2",
-      createdAt: now - 19 * 3600,
+      createdAt: now.minus({ hours: 19 }).toSeconds(),
      retryCount: 0,
    };
-
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [
-          { name: `delayed-task:${task1Metadata.scheduledEpochTime}:task-1` },
-          { name: `delayed-task:${task2Metadata.scheduledEpochTime}:task-2` },
-        ],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
+    await env.DELAYED_TASKS.put(
+      `delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
+      JSON.stringify(task1Metadata),
+    );
+    await env.DELAYED_TASKS.put(
+      `delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
+      JSON.stringify(task2Metadata),
    );

-    kvGetSpy
-      .mockReturnValueOnce(Promise.resolve(JSON.stringify(task1Metadata)))
-      .mockReturnValueOnce(Promise.resolve(JSON.stringify(task2Metadata)));
-
-    await processDelayedTasks(mockEnv, mockCtx);
-
-    expect(queueSendSpy).toHaveBeenCalledTimes(2);
-    expect(kvDeleteSpy).toHaveBeenCalledTimes(2);
+    await processDelayedTasks(env);
+
+    await expect(
+      env.DELAYED_TASKS.get(
+        `delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
+      ),
+    ).resolves.toBeNull();
+    await expect(
+      env.DELAYED_TASKS.get(
+        `delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
+      ),
+    ).resolves.toBeNull();
  });

  it("skips tasks with null values in KV", async () => {
-    mockEnv.DELAYED_TASKS.list = vi.fn(() =>
-      Promise.resolve({
-        keys: [{ name: "delayed-task:123:invalid" }],
-        list_complete: true as const,
-        cacheStatus: null,
-      }),
-    );
+    const queueSendSpy = vi.fn().mockResolvedValue(undefined);
+    const mockEnv = getTestEnv({
+      NEW_EPISODE: {
+        send: queueSendSpy,
+        sendBatch: queueSendSpy,
+      },
+      ANILIST_UPDATES: {
+        send: queueSendSpy,
+        sendBatch: queueSendSpy,
+      },
+    });
+    await mockEnv.DELAYED_TASKS.put(`delayed-task:123:invalid`, null);

-    kvGetSpy.mockReturnValue(Promise.resolve(null));
+    await processDelayedTasks(mockEnv);

-    await processDelayedTasks(mockEnv, mockCtx);
-
    expect(queueSendSpy).not.toHaveBeenCalled();
-    expect(kvDeleteSpy).not.toHaveBeenCalled();
+    await expect(
+      mockEnv.DELAYED_TASKS.get(`delayed-task:123:invalid`),
+    ).resolves.toBeNull();
  });
});
@@ -2,15 +2,11 @@ import { DateTime } from "luxon";

import type { DelayedTaskMetadata } from "./delayedTask";
import { deserializeDelayedTask } from "./delayedTask";
-import { queueTask } from "./queueTask";
+import { MAX_QUEUE_DELAY_SECONDS, queueTask } from "./queueTask";

-const MAX_DELAY_SECONDS = 12 * 60 * 60; // 43,200 seconds (12 hours)
const RETRY_ALERT_THRESHOLD = 3;

-export async function processDelayedTasks(
-  env: Cloudflare.Env,
-  ctx: ExecutionContext,
-): Promise<void> {
+export async function processDelayedTasks(env: Cloudflare.Env): Promise<void> {
  console.log("Starting delayed task processing cron job");

  const kvNamespace = env.DELAYED_TASKS;
@@ -31,7 +27,7 @@ export async function processDelayedTasks(
  console.log(`Found ${keys.length} delayed tasks to check`);

  const currentTime = Math.floor(Date.now() / 1000);
-  const twelveHoursFromNow = currentTime + MAX_DELAY_SECONDS;
+  const maxQueueTime = currentTime + MAX_QUEUE_DELAY_SECONDS;

  let processedCount = 0;
  let queuedCount = 0;
@@ -40,16 +36,17 @@
  for (const key of keys) {
    try {
      const value = await kvNamespace.get(key.name);
-      if (!value) {
-        console.warn(`Task key ${key.name} has no value, skipping`);
+      if (!value || value == "null") {
+        console.warn(`Task key ${key.name} has no value, removing`);
+        await kvNamespace.delete(key.name);
        continue;
      }

      const metadata: DelayedTaskMetadata = deserializeDelayedTask(value);
      processedCount++;

-      // Check if task is ready to be queued (within 12 hours of scheduled time)
-      if (metadata.scheduledEpochTime <= twelveHoursFromNow) {
+      // Check if task is ready to be queued (within 9 hours of scheduled time)
+      if (metadata.scheduledEpochTime <= maxQueueTime) {
        const remainingDelay = Math.max(
          0,
          metadata.scheduledEpochTime - currentTime,
@@ -100,7 +97,7 @@
        }
      } else {
        const hoursUntilReady =
-          (metadata.scheduledEpochTime - twelveHoursFromNow) / 3600;
+          (metadata.scheduledEpochTime - maxQueueTime) / 3600;
        console.log(
          `Task ${metadata.taskId} not ready yet (${hoursUntilReady.toFixed(1)} hours until queueable)`,
        );
@@ -35,8 +35,8 @@ describe("queueTask - delayed task handling", () => {
    (globalThis as any).crypto = { randomUUID: vi.fn(() => "test-uuid-123") };
  });

-  describe("tasks with delay <= 12 hours", () => {
-    it("queues task directly when delay is less than 12 hours", async () => {
+  describe("tasks with delay <= 9 hours", () => {
+    it("queues task directly when delay is less than 9 hours", async () => {
      await queueTask(
        "NEW_EPISODE",
        { aniListId: 123, episodeNumber: 1 },
@@ -52,12 +52,12 @@ describe("queueTask - delayed task handling", () => {
      expect(kvPutSpy).not.toHaveBeenCalled();
    });

-    it("queues task directly when delay is exactly 12 hours", async () => {
+    it("queues task directly when delay is exactly 9 hours", async () => {
      await queueTask(
        "NEW_EPISODE",
        { aniListId: 456, episodeNumber: 2 },
        {
-          scheduleConfig: { delay: { hours: 12 } },
+          scheduleConfig: { delay: { hours: 9 } },
          env: mockEnv,
        },
      );
@@ -9,9 +9,11 @@ import type { QueueName } from "./queueName";

export type QueueBody = {
  ANILIST_UPDATES: {
-    deviceId: string;
-    watchStatus: WatchStatus | null;
-    titleId: number;
+    [AnilistUpdateType.UpdateWatchStatus]: {
+      titleId: number;
+      watchStatus: WatchStatus | null;
+      aniListToken: string;
+    };
    updateType: AnilistUpdateType;
  };
  NEW_EPISODE: { aniListId: number; episodeNumber: number };
@@ -28,6 +30,10 @@ interface QueueTaskOptionalArgs {
  env?: Cloudflare.Env;
}

+export const MAX_QUEUE_DELAY_SECONDS = Duration.fromObject({ hours: 12 }).as(
+  "seconds",
+);
+
export async function queueTask(
  queueName: QueueName,
  body: QueueBody[QueueName],
@@ -40,17 +46,14 @@ export async function queueTask(
    req?.header(),
  );

-  const MAX_DELAY_SECONDS = 12 * 60 * 60; // 43,200 seconds (12 hours)
-
  // If delay exceeds 12 hours, store in KV for later processing
-  if (scheduleTime > MAX_DELAY_SECONDS) {
+  if (scheduleTime > MAX_QUEUE_DELAY_SECONDS) {
    if (!env || !env.DELAYED_TASKS) {
      throw new Error("DELAYED_TASKS KV namespace not available");
    }

-    const { generateTaskKey, serializeDelayedTask } = await import(
-      "./delayedTask"
-    );
+    const { generateTaskKey, serializeDelayedTask } =
+      await import("./delayedTask");
    const taskId = crypto.randomUUID();
    const scheduledEpochTime = Math.floor(Date.now() / 1000) + scheduleTime;

@@ -129,6 +132,9 @@ function buildTask(
      scheduleTime = Duration.fromDurationLike(delay).as("second");
    }
  }
+  const authorizationHeader = headers?.["X-Anilist-Token"]
+    ? { Authorization: `Bearer ${headers["X-Anilist-Token"]}` }
+    : {};

  switch (queueName) {
    case "ANILIST_UPDATES":
@@ -137,8 +143,8 @@ function buildTask(
        body,
        scheduleTime,
        headers: {
+          ...authorizationHeader,
          "Content-Type": "application/json",
-          "X-Anilist-Token": headers?.["X-Anilist-Token"],
        },
      };
    default:
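For context, this is the gate that `MAX_QUEUE_DELAY_SECONDS` now centralizes, sketched with an assumed 20-hour delay; the KV key format matches the delayed-task tests above:

```ts
import { Duration } from "luxon";

import { MAX_QUEUE_DELAY_SECONDS } from "~/libs/tasks/queueTask";

const scheduleTime = Duration.fromObject({ hours: 20 }).as("seconds"); // example delay

if (scheduleTime > MAX_QUEUE_DELAY_SECONDS) {
  // Too long for a queue delay: the task is serialized under a
  // `delayed-task:<scheduledEpochTime>:<taskId>` key in the DELAYED_TASKS KV
  // namespace and picked up later by the processDelayedTasks cron.
} else {
  // Short enough: the task goes straight to the queue with its delay applied.
}
```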
@@ -8,10 +8,12 @@ export function getTestEnvVariables(): Cloudflare.Env {
 export function getTestEnv({
   ADMIN_SDK_JSON = '{"client_email": "test@test.com", "project_id": "test-26g38"}',
   LOG_DB_QUERIES = "false",
+  ...mockEnv
 }: Partial<Cloudflare.Env> = {}): Cloudflare.Env {
   return {
     ...env,
     ADMIN_SDK_JSON,
     LOG_DB_QUERIES,
+    ...mockEnv,
   };
 }
25 src/middleware/userProfile.ts (new file)
@@ -0,0 +1,25 @@
+import { createMiddleware } from "hono/factory";
+
+import type { User } from "~/types/user";
+
+export const userProfileMiddleware = createMiddleware<
+  Cloudflare.Env & {
+    Variables: {
+      user: User;
+    };
+    Bindings: Env;
+  }
+>(async (c, next) => {
+  const aniListToken = await c.req.header("X-AniList-Token");
+  if (!aniListToken) {
+    return next();
+  }
+
+  const user = await c.env.ANILIST_DO.getByName("GLOBAL").getUser(aniListToken);
+  if (!user) {
+    return c.json({ error: "User not found" }, 401);
+  }
+
+  c.set("user", user);
+  return next();
+});
@@ -1,47 +0,0 @@
-import { Project } from "ts-morph";
-import { $ } from "zx";
-
-import { logStep } from "~/libs/logStep";
-
-await logStep(
-  'Re-generating "env.d.ts"',
-  () => $`wrangler types src/types/env.d.ts`.quiet(),
-  "Generated env.d.ts",
-);
-
-const secretNames = await logStep(
-  "Fetching secrets from Cloudflare",
-  async (): Promise<string[]> => {
-    const { stdout } = await $`wrangler secret list`.quiet();
-    return JSON.parse(stdout.toString()).map(
-      (secret: { name: string; type: "secret_text" }) => secret.name,
-    );
-  },
-  "Fetched secrets",
-);
-
-const project = new Project({});
-
-const envSourceFile = project.addSourceFileAtPath("src/types/env.d.ts");
-envSourceFile.insertImportDeclaration(2, {
-  isTypeOnly: true,
-  moduleSpecifier: "hono",
-  namedImports: ["Env as HonoEnv"],
-});
-envSourceFile
-  .getInterfaceOrThrow("Env")
-  .addExtends(["HonoEnv", "Record<string, unknown>"]);
-envSourceFile.getInterfaceOrThrow("Env").addProperties(
-  secretNames.map((name) => ({
-    name,
-    type: `string`,
-  })),
-);
-
-await project.save();
-
-await logStep(
-  "Formatting env.d.ts",
-  () => $`prettier --write src/types/env.d.ts`.quiet(),
-  "Formatted env.d.ts",
-);
@@ -1,29 +0,0 @@
-// import { GraphQLClient } from "graphql-request";
-import { HttpsProxyAgent } from "https-proxy-agent";
-import nodeFetch from "node-fetch";
-
-// import { GetTitleQuery } from "../libs/anilist/getTitle.ts";
-
-const agent = new HttpsProxyAgent(
-  "http://ruru:pdh!CQB@kpc3vyb3cwc@45.56.108.251:3128",
-);
-const response = await nodeFetch("https://httpbin.org/ip", { agent });
-console.log(await response.text());
-console.log(response.status);
-console.log(nodeFetch);
-
-// const client = new GraphQLClient("https://graphql.anilist.co/", {
-//   fetch: (input, init) => {
-//     console.log("custom fetch");
-//     const agent = new HttpsProxyAgent(
-//       "http://ruru:pdh!CQB@kpc3vyb3cwc@45.56.108.251:3128",
-//     );
-//     return nodeFetch(input, { ...init, agent });
-//   },
-// });
-
-// console.log(
-//   await client
-//     .request(GetTitleQuery, { id: 186794 })
-//     .then((data) => data?.Media ?? undefined),
-// );
@@ -1,40 +0,0 @@
-import { readFile } from "fs/promises";
-import { $, sleep } from "zx";
-
-import { logStep } from "~/libs/logStep";
-
-await $`cp src/types/env.d.ts /tmp/env.d.ts`.quiet();
-
-await logStep(
-  'Generating "env.d.ts"',
-  // @ts-ignore
-  () => import("./generateEnv"),
-  "Generated env.d.ts",
-);
-
-await logStep("Comparing env.d.ts", async () => {
-  function filterComments(content: Buffer) {
-    return content
-      .toString()
-      .split("\n")
-      .filter((line) => !line.trim().startsWith("//"))
-      .join("\n");
-  }
-
-  const currentFileContent = filterComments(await readFile("/tmp/env.d.ts"));
-  const generatedFileContent = filterComments(
-    await readFile("src/types/env.d.ts"),
-  );
-
-  if (currentFileContent === generatedFileContent) {
-    console.log("env.d.ts is up to date");
-    return;
-  }
-
-  const isCI = process.env["IS_CI"] === "true";
-  const vcsCommand = isCI ? "git" : "sl";
-  await $`${vcsCommand} diff src/types/env.d.ts`.stdio("inherit");
-  // add 1 second to make sure spawn completes
-  await sleep(1000);
-  throw new Error("env.d.ts is out of date");
-});
@@ -21,11 +21,6 @@ export const MediaFragment = graphql(`
       medium
     }
     countryOfOrigin
-    mediaListEntry {
-      id
-      progress
-      status
-    }
     nextAiringEpisode {
       timeUntilAiring
       airingAt
@@ -3,20 +3,24 @@ import { z } from "zod";
 export type User = z.infer<typeof User>;
 export const User = z
   .object({
-    statistics: z.object({
-      minutesWatched: z.number().openapi({ type: "integer", format: "int64" }),
-      episodesWatched: z.number().openapi({ type: "integer", format: "int64" }),
-      count: z
-        .number()
-        .int() /* .openapi({ type: "integer", format: "int64" }) */,
-      meanScore: z.number().openapi({ type: "number", format: "float" }),
-    }),
     id: z.number().openapi({ type: "integer", format: "int64" }),
     name: z.string(),
-    avatar: z.object({
-      medium: z.string(),
-      large: z.string(),
-    }),
   })
   .optional()
   .nullable();
+
+export type UserProfile = z.infer<typeof UserProfile>;
+export const UserProfile = z.object({
+  statistics: z.object({
+    minutesWatched: z.number().openapi({ type: "integer", format: "int64" }),
+    episodesWatched: z.number().openapi({ type: "integer", format: "int64" }),
+    count: z.number().int(),
+    meanScore: z.number().openapi({ type: "number", format: "float" }),
+  }),
+  id: z.number().openapi({ type: "integer", format: "int64" }),
+  name: z.string(),
+  avatar: z.object({
+    medium: z.string(),
+    large: z.string(),
+  }),
+});
145 worker-configuration.d.ts (vendored)
@@ -2,32 +2,32 @@
 // Generated by Wrangler by running `wrangler types` (hash: df24977940a31745cb42d562b6645de2)
 // Runtime types generated with workerd@1.20251210.0 2025-11-28 nodejs_compat
 declare namespace Cloudflare {
   interface GlobalProps {
     mainModule: typeof import("./src/index");
     durableNamespaces: "AnilistDo";
   }
   interface Env {
     DELAYED_TASKS: KVNamespace;
     ADMIN_SDK_JSON: string;
     CLOUDFLARE_TOKEN: string;
     CLOUDFLARE_D1_TOKEN: string;
     CLOUDFLARE_ACCOUNT_ID: string;
     CLOUDFLARE_DATABASE_ID: string;
     PROXY_URL: string;
     USE_MOCK_DATA: string;
     LOG_DB_QUERIES: string;
     ANILIST_DO: DurableObjectNamespace<import("./src/index").AnilistDo>;
     DB: D1Database;
     ANILIST_UPDATES: Queue;
     NEW_EPISODE: Queue;
   }
 }
-interface Env extends Cloudflare.Env {}
+interface Env extends Cloudflare.Env { }
 type StringifyValues<EnvType extends Record<string, unknown>> = {
   [Binding in keyof EnvType]: EnvType[Binding] extends string ? EnvType[Binding] : string;
 };
 declare namespace NodeJS {
-  interface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, "ADMIN_SDK_JSON" | "CLOUDFLARE_TOKEN" | "CLOUDFLARE_D1_TOKEN" | "CLOUDFLARE_ACCOUNT_ID" | "CLOUDFLARE_DATABASE_ID" | "PROXY_URL" | "USE_MOCK_DATA" | "LOG_DB_QUERIES">> {}
+  interface ProcessEnv extends StringifyValues<Pick<Cloudflare.Env, "ADMIN_SDK_JSON" | "CLOUDFLARE_TOKEN" | "CLOUDFLARE_D1_TOKEN" | "CLOUDFLARE_ACCOUNT_ID" | "CLOUDFLARE_DATABASE_ID" | "PROXY_URL" | "USE_MOCK_DATA" | "LOG_DB_QUERIES">> { }
 }
 
 // Begin runtime types
@@ -1644,7 +1644,7 @@ declare abstract class Body {
  */
 declare var Response: {
   prototype: Response;
-  new (body?: BodyInit | null, init?: ResponseInit): Response;
+  new(body?: BodyInit | null, init?: ResponseInit): Response;
   error(): Response;
   redirect(url: string, status?: number): Response;
   json(any: any, maybeInit?: (ResponseInit | Response)): Response;
@@ -2192,7 +2192,7 @@ interface ReadableStream<R = any> {
  */
 declare const ReadableStream: {
   prototype: ReadableStream;
-  new (underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy<Uint8Array>): ReadableStream<Uint8Array>;
+  new(underlyingSource: UnderlyingByteSource, strategy?: QueuingStrategy<Uint8Array>): ReadableStream<Uint8Array>;
   new <R = any>(underlyingSource?: UnderlyingSource<R>, strategy?: QueuingStrategy<R>): ReadableStream<R>;
 };
 /**
@@ -3034,7 +3034,7 @@ type WebSocketEventMap = {
  */
 declare var WebSocket: {
   prototype: WebSocket;
-  new (url: string, protocols?: (string[] | string)): WebSocket;
+  new(url: string, protocols?: (string[] | string)): WebSocket;
   readonly READY_STATE_CONNECTING: number;
   readonly CONNECTING: number;
   readonly READY_STATE_OPEN: number;
@@ -3091,7 +3091,7 @@ interface WebSocket extends EventTarget<WebSocketEventMap> {
   extensions: string | null;
 }
 declare const WebSocketPair: {
-  new (): {
+  new(): {
     0: WebSocket;
     1: WebSocket;
   };
@@ -9413,21 +9413,21 @@ interface IncomingRequestCfPropertiesTLSClientAuthPlaceholder {
   certNotAfter: "";
 }
 /** Possible outcomes of TLS verification */
 declare type CertVerificationStatus =
   /** Authentication succeeded */
   "SUCCESS"
   /** No certificate was presented */
   | "NONE"
   /** Failed because the certificate was self-signed */
   | "FAILED:self signed certificate"
   /** Failed because the certificate failed a trust chain check */
   | "FAILED:unable to verify the first certificate"
   /** Failed because the certificate not yet valid */
   | "FAILED:certificate is not yet valid"
   /** Failed because the certificate is expired */
   | "FAILED:certificate has expired"
   /** Failed for another unspecified reason */
   | "FAILED";
 /**
  * An upstream endpoint's response to a TCP `keepalive` message from Cloudflare.
  */
@@ -9477,15 +9477,15 @@ interface D1ExecResult {
   count: number;
   duration: number;
 }
 type D1SessionConstraint =
   // Indicates that the first query should go to the primary, and the rest queries
   // using the same D1DatabaseSession will go to any replica that is consistent with
   // the bookmark maintained by the session (returned by the first query).
   'first-primary'
   // Indicates that the first query can go anywhere (primary or replica), and the rest queries
   // using the same D1DatabaseSession will go to any replica that is consistent with
   // the bookmark maintained by the session (returned by the first query).
   | 'first-unconstrained';
 type D1SessionBookmark = string;
 declare abstract class D1Database {
   prepare(query: string): D1PreparedStatement;
@@ -9599,7 +9599,7 @@ declare type EmailExportedHandler<Env = unknown> = (message: ForwardableEmailMes
 declare module "cloudflare:email" {
   let _EmailMessage: {
     prototype: EmailMessage;
-    new (from: string, to: string, raw: ReadableStream | string): EmailMessage;
+    new(from: string, to: string, raw: ReadableStream | string): EmailMessage;
   };
   export { _EmailMessage as EmailMessage };
 }
@@ -10058,17 +10058,17 @@ declare namespace Rpc {
   // The reason for using a generic type here is to build a serializable subset of structured
   // cloneable composite types. This allows types defined with the "interface" keyword to pass the
   // serializable check as well. Otherwise, only types defined with the "type" keyword would pass.
   type Serializable<T> =
     // Structured cloneables
     BaseType
     // Structured cloneable composites
     | Map<T extends Map<infer U, unknown> ? Serializable<U> : never, T extends Map<unknown, infer U> ? Serializable<U> : never> | Set<T extends Set<infer U> ? Serializable<U> : never> | ReadonlyArray<T extends ReadonlyArray<infer U> ? Serializable<U> : never> | {
       [K in keyof T]: K extends number | string ? Serializable<T[K]> : never;
     }
     // Special types
     | Stub<Stubable>
     // Serialized as stubs, see `Stubify`
     | Stubable;
   // Base type for all RPC stubs, including common memory management methods.
   // `T` is used as a marker type for unwrapping `Stub`s later.
   interface StubBase<T extends Stubable> extends Disposable {
@@ -10083,8 +10083,8 @@ declare namespace Rpc {
   type Stubify<T> = T extends Stubable ? Stub<T> : T extends Map<infer K, infer V> ? Map<Stubify<K>, Stubify<V>> : T extends Set<infer V> ? Set<Stubify<V>> : T extends Array<infer V> ? Array<Stubify<V>> : T extends ReadonlyArray<infer V> ? ReadonlyArray<Stubify<V>> : T extends BaseType ? T : T extends {
     [key: string | number]: any;
   } ? {
     [K in keyof T]: Stubify<T[K]>;
   } : T;
   // Recursively rewrite all `Stub<T>`s with the corresponding `T`s.
   // Note we use `StubBase` instead of `Stub` here to avoid circular dependencies:
   // `Stub` depends on `Provider`, which depends on `Unstubify`, which would depend on `Stub`.
@@ -10092,8 +10092,8 @@ declare namespace Rpc {
   type Unstubify<T> = T extends StubBase<infer V> ? V : T extends Map<infer K, infer V> ? Map<Unstubify<K>, Unstubify<V>> : T extends Set<infer V> ? Set<Unstubify<V>> : T extends Array<infer V> ? Array<Unstubify<V>> : T extends ReadonlyArray<infer V> ? ReadonlyArray<Unstubify<V>> : T extends BaseType ? T : T extends {
     [key: string | number]: unknown;
   } ? {
     [K in keyof T]: Unstubify<T[K]>;
   } : T;
   type UnstubifyAll<A extends any[]> = {
     [I in keyof A]: Unstubify<A[I]>;
   };
@@ -10166,7 +10166,7 @@ declare namespace Cloudflare {
   [K in keyof MainModule]: LoopbackForExport<MainModule[K]>
   // If the export is listed in `durableNamespaces`, then it is also a
   // DurableObjectNamespace.
   & (K extends GlobalProp<"durableNamespaces", never> ? MainModule[K] extends new (...args: any[]) => infer DoInstance ? DoInstance extends Rpc.DurableObjectBranded ? DurableObjectNamespace<DoInstance> : DurableObjectNamespace<undefined> : DurableObjectNamespace<undefined> : {});
   };
 }
 declare namespace CloudflareWorkersModule {
@@ -10251,6 +10251,9 @@ declare namespace CloudflareWorkersModule {
   export const env: Cloudflare.Env;
   export const exports: Cloudflare.Exports;
 }
+declare module 'cloudflare:test' {
+  export = CloudflareWorkersModule;
+}
 declare module 'cloudflare:workers' {
   export = CloudflareWorkersModule;
 }
@@ -10822,10 +10825,10 @@ interface WorkflowInstanceCreateOptions<PARAMS = unknown> {
 }
 type InstanceStatus = {
   status: 'queued' // means that instance is waiting to be started (see concurrency limits)
     | 'running' | 'paused' | 'errored' | 'terminated' // user terminated the instance while it was running
     | 'complete' | 'waiting' // instance is hibernating and waiting for sleep or event to finish
     | 'waitingForPause' // instance is finishing the current work to pause
     | 'unknown';
   error?: {
     name: string;
     message: string;
@@ -39,6 +39,14 @@ deleted_classes = ["AnilistDo"]
 tag = "v4"
 new_sqlite_classes = ["AnilistDo"]
 
+[[migrations]]
+tag = "v5"
+deleted_classes = ["AnilistDo"]
+
+[[migrations]]
+tag = "v6"
+new_sqlite_classes = ["AnilistDo"]
+
 [[queues.producers]]
 queue = "anilist-updates"
 binding = "ANILIST_UPDATES"
@@ -59,7 +67,7 @@ id = "c8db249d8ee7462b91f9c374321776e4"
 preview_id = "ff38240eb2aa4b1388c705f4974f5aec"
 
 [triggers]
-crons = ["0 */12 * * *"]
+crons = ["0 */12 * * *", "0 18 * * *"]
 
 [[d1_databases]]
 binding = "DB"