Compare commits
13 Commits
main
...
dbc78727bd
| Author | SHA1 | Date | |
|---|---|---|---|
| dbc78727bd | |||
| ec42ac4026 | |||
| 6f93c46f1e | |||
| 5f0302d25a | |||
| 4b4eac20a7 | |||
| b0650fc840 | |||
| fedc5e46da | |||
| a2702db794 | |||
| 311d575c09 | |||
| cc4f518de7 | |||
| 9b5cc7ea62 | |||
| c24ff62b30 | |||
| 495506935e |
16
.dockerignore
Normal file
16
.dockerignore
Normal file
@@ -0,0 +1,16 @@
|
||||
node_modules
|
||||
Dockerfile*
|
||||
docker-compose*
|
||||
.dockerignore
|
||||
.git
|
||||
.gitignore
|
||||
README.md
|
||||
LICENSE
|
||||
.vscode
|
||||
Makefile
|
||||
helm-charts
|
||||
.env
|
||||
.dev.vars
|
||||
.editorconfig
|
||||
.idea
|
||||
coverage*
|
||||
36
.github/workflows/main.yml
vendored
Normal file
36
.github/workflows/main.yml
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
name: Deploy
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
name: Deploy
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.1.27
|
||||
- name: Install dependencies
|
||||
run: bun install
|
||||
- name: Install libsql server
|
||||
run: curl --proto '=https' --tlsv1.2 -LsSf https://github.com/tursodatabase/libsql/releases/download/libsql-server-v0.24.14/libsql-server-installer.sh | sh
|
||||
- name: Test
|
||||
run: bun run test --dbCommand \"~/.cargo/bin/sqld --http-listen-addr=127.0.0.1:3001\"
|
||||
# TODO: uncomment this when https://github.com/cloudflare/workers-sdk/issues/5082 is fixed
|
||||
# - name: Verify env
|
||||
# run: bun env:verify
|
||||
# env:
|
||||
# CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
# IS_CI: true
|
||||
- name: Run DB migration
|
||||
run: bun run db:migrate
|
||||
env:
|
||||
TURSO_URL: ${{ vars.TURSO_URL }}
|
||||
TURSO_AUTH_TOKEN: ${{ secrets.TURSO_AUTH_TOKEN }}
|
||||
- name: Deploy
|
||||
uses: cloudflare/wrangler-action@v3
|
||||
with:
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
@@ -1 +1 @@
|
||||
nlx lint-staged
|
||||
bunx lint-staged
|
||||
|
||||
41
Dockerfile
Normal file
41
Dockerfile
Normal file
@@ -0,0 +1,41 @@
|
||||
# use the official Bun image
|
||||
# see all versions at https://hub.docker.com/r/oven/bun/tags
|
||||
FROM oven/bun:1 as base
|
||||
WORKDIR /usr/app
|
||||
|
||||
# install dependencies into temp directory
|
||||
# this will cache them and speed up future builds
|
||||
FROM base AS install
|
||||
RUN mkdir -p /tmp/dev
|
||||
COPY package.json bun.lockb /tmp/dev/
|
||||
RUN cd /tmp/dev && bun install --frozen-lockfile
|
||||
|
||||
# install with --production (exclude devDependencies)
|
||||
RUN mkdir -p /tmp/prod
|
||||
COPY package.json bun.lockb /tmp/prod/
|
||||
RUN cd /tmp/prod && bun install --frozen-lockfile --production
|
||||
|
||||
# copy node_modules from temp directory
|
||||
# then copy all (non-ignored) project files into the image
|
||||
FROM base AS prerelease
|
||||
COPY --from=install /tmp/dev/node_modules node_modules
|
||||
COPY . .
|
||||
|
||||
# [optional] tests & build
|
||||
ENV NODE_ENV=production
|
||||
RUN bun test
|
||||
RUN bun build --compile src/index.ts --outfile=aniplay
|
||||
|
||||
# copy production dependencies and source code into final image
|
||||
FROM base AS release
|
||||
COPY --from=install /tmp/prod/node_modules node_modules
|
||||
COPY --from=prerelease /usr/app/src ./src
|
||||
COPY --from=prerelease /usr/app/package.json .
|
||||
COPY --from=prerelease /usr/app/tsconfig.json .
|
||||
# TODO: uncomment once v2 is ready
|
||||
# COPY --from=prerelease /usr/app/drizzle.config.ts .
|
||||
|
||||
# run the app
|
||||
USER bun
|
||||
EXPOSE 3000
|
||||
ENTRYPOINT [ "bun", "run", "prod:server" ]
|
||||
76
README.md
76
README.md
@@ -1,72 +1,12 @@
|
||||
# Aniplay API
|
||||
```
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
|
||||
API for [Aniplay](https://github.com/silverAndroid/aniplay), built with Cloudflare Workers, Hono, and Drizzle ORM.
|
||||
|
||||
## Tech Stack
|
||||
|
||||
- **Cloudflare Workers**: Serverless execution environment.
|
||||
- **Hono**: Ultrafast web framework (OpenAPI).
|
||||
- **GraphQL**: Used internally for communicating with the [AniList](https://anilist.co) API.
|
||||
- **Drizzle ORM**: TypeScript ORM for D1 (Cloudflare's serverless SQL database).
|
||||
- **Vitest**: Testing framework.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- **Node.js**
|
||||
- **pnpm**: Package manager.
|
||||
|
||||
## Getting Started
|
||||
|
||||
1. **Installation**
|
||||
|
||||
```bash
|
||||
pnpm install
|
||||
```
|
||||
|
||||
2. **Environment Setup**
|
||||
Generate the environment types:
|
||||
|
||||
```bash
|
||||
pnpm exec wrangler types
|
||||
```
|
||||
|
||||
3. **Database Setup**
|
||||
Apply migrations to the local D1 database:
|
||||
```bash
|
||||
pnpm exec wrangler d1 migrations apply aniplay
|
||||
```
|
||||
```
|
||||
npm run deploy
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
### Running Locally
|
||||
|
||||
Start the development server:
|
||||
|
||||
```bash
|
||||
pnpm run dev
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
Run the tests using Vitest:
|
||||
|
||||
```bash
|
||||
pnpm test
|
||||
```
|
||||
|
||||
## Deployment
|
||||
|
||||
Deploy to Cloudflare Workers:
|
||||
|
||||
```bash
|
||||
pnpm run deploy
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
- `src/controllers`: API route handlers (titles, episodes, search, etc.)
|
||||
- `src/libs`: Shared utilities and logic (AniList integration, background tasks)
|
||||
- `src/middleware`: Middleware handlers (authentication, authorization, etc.)
|
||||
- `src/models`: Database schema and models
|
||||
- `src/scripts`: Utility scripts for maintenance and setup
|
||||
- `src/types`: TypeScript type definitions
|
||||
If a route is internal-only or doesn't need to appear on the OpenAPI spec (that's autogenerated by Hono), use the `Hono` class. Otherwise, use the `OpenAPIHono` class from `@hono/zod-openapi`.
|
||||
|
||||
7
bunfig.toml
Normal file
7
bunfig.toml
Normal file
@@ -0,0 +1,7 @@
|
||||
[test]
|
||||
preload = [
|
||||
"./testSetup.ts",
|
||||
"./src/mocks/consumet.ts",
|
||||
"./src/mocks/getGoogleAuthToken.ts",
|
||||
"./src/mocks/cloudflare.ts",
|
||||
]
|
||||
40
package.json
40
package.json
@@ -5,39 +5,53 @@
|
||||
"main": "src/index.ts",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "wrangler dev src/index.ts --port 8080",
|
||||
"dev:cloudflare": "wrangler dev src/index.ts --port 8080",
|
||||
"dev:server": "TURSO_URL=http://127.0.0.1:3000 TURSO_AUTH_TOKEN=123 bun run --watch src/index.ts",
|
||||
"prod:server": "bun run src/index.ts",
|
||||
"deploy": "wrangler deploy --minify src/index.ts",
|
||||
"env:generate": "bun src/scripts/generateEnv.ts",
|
||||
"env:verify": "bun src/scripts/verifyEnv.ts",
|
||||
"db:generate": "drizzle-kit generate",
|
||||
"db:migrate": "drizzle-kit migrate",
|
||||
"test": "vitest",
|
||||
"test:ui": "vitest --ui",
|
||||
"test:coverage": "vitest run --coverage",
|
||||
"test": "bun src/testRunner.ts",
|
||||
"prepare": "husky",
|
||||
"tsx": "tsx"
|
||||
},
|
||||
"dependencies": {
|
||||
"@consumet/extensions": "github:consumet/consumet.ts#3dd0ccb",
|
||||
"@haverstack/axios-fetch-adapter": "^0.12.0",
|
||||
"@hono/swagger-ui": "^0.5.1",
|
||||
"@hono/zod-openapi": "^0.19.5",
|
||||
"@hono/zod-validator": "^0.2.2",
|
||||
"blurhash": "^2.0.5",
|
||||
"drizzle-orm": "^0.44.7",
|
||||
"gql.tada": "^1.8.10",
|
||||
"graphql": "^16.12.0",
|
||||
"graphql-request": "^7.4.0",
|
||||
"graphql-request": "^7.1.2",
|
||||
"graphql-yoga": "^5.17.0",
|
||||
"hono": "^4.7.7",
|
||||
"jose": "^5.10.0",
|
||||
"jpeg-js": "^0.4.4",
|
||||
"lodash.isequal": "^4.5.0",
|
||||
"lodash.mapkeys": "^4.6.0",
|
||||
"luxon": "^3.6.1",
|
||||
"pngjs": "^7.0.0",
|
||||
"zod": "^3.24.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@cloudflare/vitest-pool-workers": "^0.10.15",
|
||||
"@graphql-typed-document-node/core": "^3.2.0",
|
||||
"@0no-co/graphqlsp": "^1.12.16",
|
||||
"@cloudflare/vitest-pool-workers": "^0.10.7",
|
||||
"@cloudflare/workers-types": "^4.20250423.0",
|
||||
"@trivago/prettier-plugin-sort-imports": "^4.3.0",
|
||||
"@types/bun": "^1.2.10",
|
||||
"@types/lodash.isequal": "^4.5.8",
|
||||
"@types/lodash.mapkeys": "^4.6.9",
|
||||
"@types/luxon": "^3.6.2",
|
||||
"@types/node": "^25.0.1",
|
||||
"@vitest/coverage-istanbul": "~3.2.4",
|
||||
"@vitest/ui": "~3.2.4",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/pngjs": "^6.0.5",
|
||||
"@vitest/coverage-v8": "^3.2.4",
|
||||
"@vitest/runner": "^3.2.4",
|
||||
"@vitest/snapshot": "^3.2.4",
|
||||
"cloudflare": "^5.2.0",
|
||||
"dotenv": "^17.2.3",
|
||||
"drizzle-kit": "^0.31.7",
|
||||
@@ -45,13 +59,15 @@
|
||||
"gtoken": "^7.1.0",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^15.5.1",
|
||||
"miniflare": "^3.20241106.0",
|
||||
"miniflare": "^4.20251109.1",
|
||||
"msw": "2.4.3",
|
||||
"prettier": "^3.5.3",
|
||||
"prettier-plugin-toml": "^2.0.4",
|
||||
"ts-morph": "^22.0.0",
|
||||
"tsx": "^4.20.6",
|
||||
"typescript": "^5.8.3",
|
||||
"vitest": "~3.2.4",
|
||||
"util": "^0.12.5",
|
||||
"vitest": "^3.2.4",
|
||||
"wrangler": "^4.51.0",
|
||||
"zx": "8.1.5"
|
||||
},
|
||||
|
||||
30
patches/blurhash.patch
Normal file
30
patches/blurhash.patch
Normal file
@@ -0,0 +1,30 @@
|
||||
diff --git a/CHANGELOG.md b/CHANGELOG.md
|
||||
deleted file mode 100644
|
||||
index f793ae02ac3104ed8272b06e4067edde2944a1b9..0000000000000000000000000000000000000000
|
||||
diff --git a/dist/esm/index.js b/dist/esm/index.js
|
||||
index 254eb7a0a33eba9f6622552cfaa88db9c01ab73a..06380b72abb031372b5b176078bb7199f62d62d1 100644
|
||||
--- a/dist/esm/index.js
|
||||
+++ b/dist/esm/index.js
|
||||
@@ -1,2 +1 @@
|
||||
-var q=["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z","a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z","#","$","%","*","+",",","-",".",":",";","=","?","@","[","]","^","_","{","|","}","~"],x=t=>{let e=0;for(let r=0;r<t.length;r++){let n=t[r],l=q.indexOf(n);e=e*83+l}return e},p=(t,e)=>{var r="";for(let n=1;n<=e;n++){let l=Math.floor(t)/Math.pow(83,e-n)%83;r+=q[Math.floor(l)]}return r};var f=t=>{let e=t/255;return e<=.04045?e/12.92:Math.pow((e+.055)/1.055,2.4)},h=t=>{let e=Math.max(0,Math.min(1,t));return e<=.0031308?Math.trunc(e*12.92*255+.5):Math.trunc((1.055*Math.pow(e,.4166666666666667)-.055)*255+.5)},F=t=>t<0?-1:1,M=(t,e)=>F(t)*Math.pow(Math.abs(t),e);var d=class extends Error{constructor(e){super(e),this.name="ValidationError",this.message=e}};var C=t=>{if(!t||t.length<6)throw new d("The blurhash string must be at least 6 characters");let e=x(t[0]),r=Math.floor(e/9)+1,n=e%9+1;if(t.length!==4+2*n*r)throw new d(`blurhash length mismatch: length is ${t.length} but it should be ${4+2*n*r}`)},N=t=>{try{C(t)}catch(e){return{result:!1,errorReason:e.message}}return{result:!0}},z=t=>{let e=t>>16,r=t>>8&255,n=t&255;return[f(e),f(r),f(n)]},L=(t,e)=>{let r=Math.floor(t/361),n=Math.floor(t/19)%19,l=t%19;return[M((r-9)/9,2)*e,M((n-9)/9,2)*e,M((l-9)/9,2)*e]},U=(t,e,r,n)=>{C(t),n=n|1;let l=x(t[0]),m=Math.floor(l/9)+1,b=l%9+1,i=(x(t[1])+1)/166,u=new Array(b*m);for(let o=0;o<u.length;o++)if(o===0){let a=x(t.substring(2,6));u[o]=z(a)}else{let a=x(t.substring(4+o*2,6+o*2));u[o]=L(a,i*n)}let c=e*4,s=new Uint8ClampedArray(c*r);for(let o=0;o<r;o++)for(let a=0;a<e;a++){let y=0,B=0,R=0;for(let w=0;w<m;w++)for(let P=0;P<b;P++){let G=Math.cos(Math.PI*a*P/e)*Math.cos(Math.PI*o*w/r),T=u[P+w*b];y+=T[0]*G,B+=T[1]*G,R+=T[2]*G}let V=h(y),I=h(B),E=h(R);s[4*a+0+o*c]=V,s[4*a+1+o*c]=I,s[4*a+2+o*c]=E,s[4*a+3+o*c]=255}return s},j=U;var 
A=4,D=(t,e,r,n)=>{let l=0,m=0,b=0,g=e*A;for(let u=0;u<e;u++){let c=A*u;for(let s=0;s<r;s++){let o=c+s*g,a=n(u,s);l+=a*f(t[o]),m+=a*f(t[o+1]),b+=a*f(t[o+2])}}let i=1/(e*r);return[l*i,m*i,b*i]},$=t=>{let e=h(t[0]),r=h(t[1]),n=h(t[2]);return(e<<16)+(r<<8)+n},H=(t,e)=>{let r=Math.floor(Math.max(0,Math.min(18,Math.floor(M(t[0]/e,.5)*9+9.5)))),n=Math.floor(Math.max(0,Math.min(18,Math.floor(M(t[1]/e,.5)*9+9.5)))),l=Math.floor(Math.max(0,Math.min(18,Math.floor(M(t[2]/e,.5)*9+9.5))));return r*19*19+n*19+l},O=(t,e,r,n,l)=>{if(n<1||n>9||l<1||l>9)throw new d("BlurHash must have between 1 and 9 components");if(e*r*4!==t.length)throw new d("Width and height must match the pixels array");let m=[];for(let s=0;s<l;s++)for(let o=0;o<n;o++){let a=o==0&&s==0?1:2,y=D(t,e,r,(B,R)=>a*Math.cos(Math.PI*o*B/e)*Math.cos(Math.PI*s*R/r));m.push(y)}let b=m[0],g=m.slice(1),i="",u=n-1+(l-1)*9;i+=p(u,1);let c;if(g.length>0){let s=Math.max(...g.map(a=>Math.max(...a))),o=Math.floor(Math.max(0,Math.min(82,Math.floor(s*166-.5))));c=(o+1)/166,i+=p(o,1)}else c=1,i+=p(0,1);return i+=p($(b),4),g.forEach(s=>{i+=p(H(s,c),2)}),i},S=O;export{d as ValidationError,j as decode,S as encode,N as isBlurhashValid};
|
||||
-//# sourceMappingURL=index.js.map
|
||||
\ No newline at end of file
|
||||
+var A=["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z","a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z","#","$","%","*","+",",","-",".",":",";","=","?","@","[","]","^","_","{","|","}","~"],d=t=>{let r=0;for(let a=0;a<t.length;a++){let l=t[a],o=A.indexOf(l);r=r*83+o}return r},b=(t,r)=>{var a="";for(let l=1;l<=r;l++){let o=Math.floor(t)/Math.pow(83,r-l)%83;a+=A[Math.floor(o)]}return a},c=t=>{let r=t/255;return r<=.04045?r/12.92:Math.pow((r+.055)/1.055,2.4)},g=t=>{let r=Math.max(0,Math.min(1,t));return r<=.0031308?Math.trunc(r*12.92*255+.5):Math.trunc((1.055*Math.pow(r,.4166666666666667)-.055)*255+.5)},O=t=>t<0?-1:1,w=(t,r)=>O(t)*Math.pow(Math.abs(t),r),p=class extends Error{constructor(t){super(t),this.name="ValidationError",this.message=t}},B=t=>{if(!t||t.length<6)throw new p("The blurhash string must be at least 6 characters");let r=d(t[0]),a=Math.floor(r/9)+1,l=r%9+1;if(t.length!==4+2*l*a)throw new p(`blurhash length mismatch: length is ${t.length} but it should be ${4+2*l*a}`)},R=t=>{try{B(t)}catch(r){return{result:!1,errorReason:r.message}}return{result:!0}},T=t=>{let r=t>>16,a=t>>8&255,l=t&255;return[c(r),c(a),c(l)]},U=(t,r)=>{let a=Math.floor(t/361),l=Math.floor(t/19)%19,o=t%19;return[w((a-9)/9,2)*r,w((l-9)/9,2)*r,w((o-9)/9,2)*r]},j=(t,r,a,l)=>{B(t),l=l|1;let o=d(t[0]),i=Math.floor(o/9)+1,u=o%9+1,m=(d(t[1])+1)/166,n=new Array(u*i);for(let e=0;e<n.length;e++)if(e===0){let h=d(t.substring(2,6));n[e]=T(h)}else{let h=d(t.substring(4+e*2,6+e*2));n[e]=U(h,m*l)}let s=r*4,M=new Uint8ClampedArray(s*a);for(let e=0;e<a;e++)for(let h=0;h<r;h++){let f=0,x=0,v=0;for(let y=0;y<i;y++)for(let E=0;E<u;E++){let P=Math.cos(Math.PI*h*E/r)*Math.cos(Math.PI*e*y/a),V=n[E+y*u];f+=V[0]*P,x+=V[1]*P,v+=V[2]*P}let I=g(f),C=g(x),H=g(v);M[4*h+0+e*s]=I,M[4*h+1+e*s]=C,M[4*h+2+e*s]=H,M[4*h+3+e*s]=255}return M},q=j,$=4,z=(t,r,a,l)=>{let 
o=0,i=0,u=0,m=r*$;for(let s=0;s<r;s++){let M=$*s;for(let e=0;e<a;e++){let h=M+e*m,f=l(s,e);o+=f*c(t[h]),i+=f*c(t[h+1]),u+=f*c(t[h+2])}}let n=1/(r*a);return[o*n,i*n,u*n]},D=t=>{let r=g(t[0]),a=g(t[1]),l=g(t[2]);return(r<<16)+(a<<8)+l},F=(t,r)=>{let a=Math.floor(Math.max(0,Math.min(18,Math.floor(w(t[0]/r,.5)*9+9.5)))),l=Math.floor(Math.max(0,Math.min(18,Math.floor(w(t[1]/r,.5)*9+9.5)))),o=Math.floor(Math.max(0,Math.min(18,Math.floor(w(t[2]/r,.5)*9+9.5))));return a*19*19+l*19+o},G=(t,r,a,l,o)=>{if(l<1||l>9||o<1||o>9)throw new p("BlurHash must have between 1 and 9 components");if(Math.floor(r*a*4)!==t.length)throw new p("Width and height must match the pixels array");let i=[];for(let e=0;e<o;e++)for(let h=0;h<l;h++){let f=h==0&&e==0?1:2,x=z(t,r,a,(v,I)=>f*Math.cos(Math.PI*h*v/r)*Math.cos(Math.PI*e*I/a));i.push(x)}let u=i[0],m=i.slice(1),n="",s=l-1+(o-1)*9;n+=b(s,1);let M;if(m.length>0){let e=Math.max(...m.map(f=>Math.max(...f))),h=Math.floor(Math.max(0,Math.min(82,Math.floor(e*166-.5))));M=(h+1)/166,n+=b(h,1)}else M=1,n+=b(0,1);return n+=b(D(u),4),m.forEach(e=>{n+=b(F(e,M),2)}),n},L=G;export{p as ValidationError,q as decode,L as encode,R as isBlurhashValid};
|
||||
diff --git a/dist/index.js b/dist/index.js
|
||||
index fe46957ffed377f20992b86da266ce679c515802..075ab8fe648c9a34edcee9a842eb00c34eaa5179 100644
|
||||
--- a/dist/index.js
|
||||
+++ b/dist/index.js
|
||||
@@ -1,2 +1 @@
|
||||
-var q=Object.defineProperty;var U=Object.getOwnPropertyDescriptor;var j=Object.getOwnPropertyNames;var D=Object.prototype.hasOwnProperty;var $=(t,e)=>{for(var r in e)q(t,r,{get:e[r],enumerable:!0})},H=(t,e,r,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let s of j(e))!D.call(t,s)&&s!==r&&q(t,s,{get:()=>e[s],enumerable:!(n=U(e,s))||n.enumerable});return t};var O=t=>H(q({},"__esModule",{value:!0}),t);var _={};$(_,{ValidationError:()=>b,decode:()=>I,encode:()=>F,isBlurhashValid:()=>V});module.exports=O(_);var C=["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z","a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z","#","$","%","*","+",",","-",".",":",";","=","?","@","[","]","^","_","{","|","}","~"],x=t=>{let e=0;for(let r=0;r<t.length;r++){let n=t[r],s=C.indexOf(n);e=e*83+s}return e},p=(t,e)=>{var r="";for(let n=1;n<=e;n++){let s=Math.floor(t)/Math.pow(83,e-n)%83;r+=C[Math.floor(s)]}return r};var h=t=>{let e=t/255;return e<=.04045?e/12.92:Math.pow((e+.055)/1.055,2.4)},M=t=>{let e=Math.max(0,Math.min(1,t));return e<=.0031308?Math.trunc(e*12.92*255+.5):Math.trunc((1.055*Math.pow(e,.4166666666666667)-.055)*255+.5)},S=t=>t<0?-1:1,d=(t,e)=>S(t)*Math.pow(Math.abs(t),e);var b=class extends Error{constructor(e){super(e),this.name="ValidationError",this.message=e}};var A=t=>{if(!t||t.length<6)throw new b("The blurhash string must be at least 6 characters");let e=x(t[0]),r=Math.floor(e/9)+1,n=e%9+1;if(t.length!==4+2*n*r)throw new b(`blurhash length mismatch: length is ${t.length} but it should be ${4+2*n*r}`)},V=t=>{try{A(t)}catch(e){return{result:!1,errorReason:e.message}}return{result:!0}},W=t=>{let e=t>>16,r=t>>8&255,n=t&255;return[h(e),h(r),h(n)]},k=(t,e)=>{let r=Math.floor(t/361),n=Math.floor(t/19)%19,s=t%19;return[d((r-9)/9,2)*e,d((n-9)/9,2)*e,d((s-9)/9,2)*e]},J=(t,e,r,n)=>{A(t),n=n|1;let 
s=x(t[0]),m=Math.floor(s/9)+1,f=s%9+1,i=(x(t[1])+1)/166,u=new Array(f*m);for(let o=0;o<u.length;o++)if(o===0){let l=x(t.substring(2,6));u[o]=W(l)}else{let l=x(t.substring(4+o*2,6+o*2));u[o]=k(l,i*n)}let c=e*4,a=new Uint8ClampedArray(c*r);for(let o=0;o<r;o++)for(let l=0;l<e;l++){let y=0,B=0,R=0;for(let w=0;w<m;w++)for(let P=0;P<f;P++){let G=Math.cos(Math.PI*l*P/e)*Math.cos(Math.PI*o*w/r),T=u[P+w*f];y+=T[0]*G,B+=T[1]*G,R+=T[2]*G}let N=M(y),z=M(B),L=M(R);a[4*l+0+o*c]=N,a[4*l+1+o*c]=z,a[4*l+2+o*c]=L,a[4*l+3+o*c]=255}return a},I=J;var E=4,K=(t,e,r,n)=>{let s=0,m=0,f=0,g=e*E;for(let u=0;u<e;u++){let c=E*u;for(let a=0;a<r;a++){let o=c+a*g,l=n(u,a);s+=l*h(t[o]),m+=l*h(t[o+1]),f+=l*h(t[o+2])}}let i=1/(e*r);return[s*i,m*i,f*i]},Q=t=>{let e=M(t[0]),r=M(t[1]),n=M(t[2]);return(e<<16)+(r<<8)+n},X=(t,e)=>{let r=Math.floor(Math.max(0,Math.min(18,Math.floor(d(t[0]/e,.5)*9+9.5)))),n=Math.floor(Math.max(0,Math.min(18,Math.floor(d(t[1]/e,.5)*9+9.5)))),s=Math.floor(Math.max(0,Math.min(18,Math.floor(d(t[2]/e,.5)*9+9.5))));return r*19*19+n*19+s},Z=(t,e,r,n,s)=>{if(n<1||n>9||s<1||s>9)throw new b("BlurHash must have between 1 and 9 components");if(e*r*4!==t.length)throw new b("Width and height must match the pixels array");let m=[];for(let a=0;a<s;a++)for(let o=0;o<n;o++){let l=o==0&&a==0?1:2,y=K(t,e,r,(B,R)=>l*Math.cos(Math.PI*o*B/e)*Math.cos(Math.PI*a*R/r));m.push(y)}let f=m[0],g=m.slice(1),i="",u=n-1+(s-1)*9;i+=p(u,1);let c;if(g.length>0){let a=Math.max(...g.map(l=>Math.max(...l))),o=Math.floor(Math.max(0,Math.min(82,Math.floor(a*166-.5))));c=(o+1)/166,i+=p(o,1)}else c=1,i+=p(0,1);return i+=p(Q(f),4),g.forEach(a=>{i+=p(X(a,c),2)}),i},F=Z;0&&(module.exports={ValidationError,decode,encode,isBlurhashValid});
|
||||
-//# sourceMappingURL=index.js.map
|
||||
\ No newline at end of file
|
||||
+var q=Object.defineProperty,U=Object.getOwnPropertyDescriptor,j=Object.getOwnPropertyNames,D=Object.prototype.hasOwnProperty,$=(t,e)=>{for(var r in e)q(t,r,{get:e[r],enumerable:!0})},H=(t,e,r,a)=>{if(e&&typeof e=="object"||typeof e=="function")for(let o of j(e))!D.call(t,o)&&o!==r&&q(t,o,{get:()=>e[o],enumerable:!(a=U(e,o))||a.enumerable});return t},O=t=>H(q({},"__esModule",{value:!0}),t),_={};$(_,{ValidationError:()=>b,decode:()=>I,encode:()=>F,isBlurhashValid:()=>V}),module.exports=O(_);var C=["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z","a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z","#","$","%","*","+",",","-",".",":",";","=","?","@","[","]","^","_","{","|","}","~"],x=t=>{let e=0;for(let r=0;r<t.length;r++){let a=t[r],o=C.indexOf(a);e=e*83+o}return e},p=(t,e)=>{var r="";for(let a=1;a<=e;a++){let o=Math.floor(t)/Math.pow(83,e-a)%83;r+=C[Math.floor(o)]}return r},h=t=>{let e=t/255;return e<=.04045?e/12.92:Math.pow((e+.055)/1.055,2.4)},M=t=>{let e=Math.max(0,Math.min(1,t));return e<=.0031308?Math.trunc(e*12.92*255+.5):Math.trunc((1.055*Math.pow(e,.4166666666666667)-.055)*255+.5)},S=t=>t<0?-1:1,d=(t,e)=>S(t)*Math.pow(Math.abs(t),e),b=class extends Error{constructor(t){super(t),this.name="ValidationError",this.message=t}},A=t=>{if(!t||t.length<6)throw new b("The blurhash string must be at least 6 characters");let e=x(t[0]),r=Math.floor(e/9)+1,a=e%9+1;if(t.length!==4+2*a*r)throw new b(`blurhash length mismatch: length is ${t.length} but it should be ${4+2*a*r}`)},V=t=>{try{A(t)}catch(e){return{result:!1,errorReason:e.message}}return{result:!0}},W=t=>{let e=t>>16,r=t>>8&255,a=t&255;return[h(e),h(r),h(a)]},k=(t,e)=>{let r=Math.floor(t/361),a=Math.floor(t/19)%19,o=t%19;return[d((r-9)/9,2)*e,d((a-9)/9,2)*e,d((o-9)/9,2)*e]},J=(t,e,r,a)=>{A(t),a=a|1;let o=x(t[0]),m=Math.floor(o/9)+1,c=o%9+1,g=(x(t[1])+1)/166,s=new 
Array(c*m);for(let l=0;l<s.length;l++)if(l===0){let n=x(t.substring(2,6));s[l]=W(n)}else{let n=x(t.substring(4+l*2,6+l*2));s[l]=k(n,g*a)}let f=e*4,u=new Uint8ClampedArray(f*r);for(let l=0;l<r;l++)for(let n=0;n<e;n++){let i=0,w=0,y=0;for(let P=0;P<m;P++)for(let v=0;v<c;v++){let N=Math.cos(Math.PI*n*v/e)*Math.cos(Math.PI*l*P/r),R=s[v+P*c];i+=R[0]*N,w+=R[1]*N,y+=R[2]*N}let B=M(i),T=M(w),z=M(y);u[4*n+0+l*f]=B,u[4*n+1+l*f]=T,u[4*n+2+l*f]=z,u[4*n+3+l*f]=255}return u},I=J,E=4,K=(t,e,r,a)=>{let o=0,m=0,c=0,g=e*E;for(let f=0;f<e;f++){let u=E*f;for(let l=0;l<r;l++){let n=u+l*g,i=a(f,l);o+=i*h(t[n]),m+=i*h(t[n+1]),c+=i*h(t[n+2])}}let s=1/(e*r);return[o*s,m*s,c*s]},Q=t=>{let e=M(t[0]),r=M(t[1]),a=M(t[2]);return(e<<16)+(r<<8)+a},X=(t,e)=>{let r=Math.floor(Math.max(0,Math.min(18,Math.floor(d(t[0]/e,.5)*9+9.5)))),a=Math.floor(Math.max(0,Math.min(18,Math.floor(d(t[1]/e,.5)*9+9.5)))),o=Math.floor(Math.max(0,Math.min(18,Math.floor(d(t[2]/e,.5)*9+9.5))));return r*19*19+a*19+o},Z=(t,e,r,a,o)=>{if(a<1||a>9||o<1||o>9)throw new b("BlurHash must have between 1 and 9 components");if(Math.floor(e*r*4)!==t.length)throw new b("Width and height must match the pixels array");let m=[];for(let l=0;l<o;l++)for(let n=0;n<a;n++){let i=n==0&&l==0?1:2,w=K(t,e,r,(y,B)=>i*Math.cos(Math.PI*n*y/e)*Math.cos(Math.PI*l*B/r));m.push(w)}let c=m[0],g=m.slice(1),s="",f=a-1+(o-1)*9;s+=p(f,1);let u;if(g.length>0){let l=Math.max(...g.map(i=>Math.max(...i))),n=Math.floor(Math.max(0,Math.min(82,Math.floor(l*166-.5))));u=(n+1)/166,s+=p(n,1)}else u=1,s+=p(0,1);return s+=p(Q(c),4),g.forEach(l=>{s+=p(X(l,u),2)}),s},F=Z;
|
||||
diff --git a/dist/index.mjs b/dist/index.mjs
|
||||
index 0feea2d84b8d1ed0f05386aaf9bb1d278aed3d0a..06380b72abb031372b5b176078bb7199f62d62d1 100644
|
||||
--- a/dist/index.mjs
|
||||
+++ b/dist/index.mjs
|
||||
@@ -1,2 +1 @@
|
||||
-var q=["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z","a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z","#","$","%","*","+",",","-",".",":",";","=","?","@","[","]","^","_","{","|","}","~"],x=t=>{let e=0;for(let r=0;r<t.length;r++){let n=t[r],l=q.indexOf(n);e=e*83+l}return e},p=(t,e)=>{var r="";for(let n=1;n<=e;n++){let l=Math.floor(t)/Math.pow(83,e-n)%83;r+=q[Math.floor(l)]}return r};var f=t=>{let e=t/255;return e<=.04045?e/12.92:Math.pow((e+.055)/1.055,2.4)},h=t=>{let e=Math.max(0,Math.min(1,t));return e<=.0031308?Math.trunc(e*12.92*255+.5):Math.trunc((1.055*Math.pow(e,.4166666666666667)-.055)*255+.5)},F=t=>t<0?-1:1,M=(t,e)=>F(t)*Math.pow(Math.abs(t),e);var d=class extends Error{constructor(e){super(e),this.name="ValidationError",this.message=e}};var C=t=>{if(!t||t.length<6)throw new d("The blurhash string must be at least 6 characters");let e=x(t[0]),r=Math.floor(e/9)+1,n=e%9+1;if(t.length!==4+2*n*r)throw new d(`blurhash length mismatch: length is ${t.length} but it should be ${4+2*n*r}`)},N=t=>{try{C(t)}catch(e){return{result:!1,errorReason:e.message}}return{result:!0}},z=t=>{let e=t>>16,r=t>>8&255,n=t&255;return[f(e),f(r),f(n)]},L=(t,e)=>{let r=Math.floor(t/361),n=Math.floor(t/19)%19,l=t%19;return[M((r-9)/9,2)*e,M((n-9)/9,2)*e,M((l-9)/9,2)*e]},U=(t,e,r,n)=>{C(t),n=n|1;let l=x(t[0]),m=Math.floor(l/9)+1,b=l%9+1,i=(x(t[1])+1)/166,u=new Array(b*m);for(let o=0;o<u.length;o++)if(o===0){let a=x(t.substring(2,6));u[o]=z(a)}else{let a=x(t.substring(4+o*2,6+o*2));u[o]=L(a,i*n)}let c=e*4,s=new Uint8ClampedArray(c*r);for(let o=0;o<r;o++)for(let a=0;a<e;a++){let y=0,B=0,R=0;for(let w=0;w<m;w++)for(let P=0;P<b;P++){let G=Math.cos(Math.PI*a*P/e)*Math.cos(Math.PI*o*w/r),T=u[P+w*b];y+=T[0]*G,B+=T[1]*G,R+=T[2]*G}let V=h(y),I=h(B),E=h(R);s[4*a+0+o*c]=V,s[4*a+1+o*c]=I,s[4*a+2+o*c]=E,s[4*a+3+o*c]=255}return s},j=U;var 
A=4,D=(t,e,r,n)=>{let l=0,m=0,b=0,g=e*A;for(let u=0;u<e;u++){let c=A*u;for(let s=0;s<r;s++){let o=c+s*g,a=n(u,s);l+=a*f(t[o]),m+=a*f(t[o+1]),b+=a*f(t[o+2])}}let i=1/(e*r);return[l*i,m*i,b*i]},$=t=>{let e=h(t[0]),r=h(t[1]),n=h(t[2]);return(e<<16)+(r<<8)+n},H=(t,e)=>{let r=Math.floor(Math.max(0,Math.min(18,Math.floor(M(t[0]/e,.5)*9+9.5)))),n=Math.floor(Math.max(0,Math.min(18,Math.floor(M(t[1]/e,.5)*9+9.5)))),l=Math.floor(Math.max(0,Math.min(18,Math.floor(M(t[2]/e,.5)*9+9.5))));return r*19*19+n*19+l},O=(t,e,r,n,l)=>{if(n<1||n>9||l<1||l>9)throw new d("BlurHash must have between 1 and 9 components");if(e*r*4!==t.length)throw new d("Width and height must match the pixels array");let m=[];for(let s=0;s<l;s++)for(let o=0;o<n;o++){let a=o==0&&s==0?1:2,y=D(t,e,r,(B,R)=>a*Math.cos(Math.PI*o*B/e)*Math.cos(Math.PI*s*R/r));m.push(y)}let b=m[0],g=m.slice(1),i="",u=n-1+(l-1)*9;i+=p(u,1);let c;if(g.length>0){let s=Math.max(...g.map(a=>Math.max(...a))),o=Math.floor(Math.max(0,Math.min(82,Math.floor(s*166-.5))));c=(o+1)/166,i+=p(o,1)}else c=1,i+=p(0,1);return i+=p($(b),4),g.forEach(s=>{i+=p(H(s,c),2)}),i},S=O;export{d as ValidationError,j as decode,S as encode,N as isBlurhashValid};
|
||||
-//# sourceMappingURL=index.mjs.map
|
||||
\ No newline at end of file
|
||||
+var A=["0","1","2","3","4","5","6","7","8","9","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z","a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z","#","$","%","*","+",",","-",".",":",";","=","?","@","[","]","^","_","{","|","}","~"],d=t=>{let r=0;for(let a=0;a<t.length;a++){let l=t[a],o=A.indexOf(l);r=r*83+o}return r},b=(t,r)=>{var a="";for(let l=1;l<=r;l++){let o=Math.floor(t)/Math.pow(83,r-l)%83;a+=A[Math.floor(o)]}return a},c=t=>{let r=t/255;return r<=.04045?r/12.92:Math.pow((r+.055)/1.055,2.4)},g=t=>{let r=Math.max(0,Math.min(1,t));return r<=.0031308?Math.trunc(r*12.92*255+.5):Math.trunc((1.055*Math.pow(r,.4166666666666667)-.055)*255+.5)},O=t=>t<0?-1:1,w=(t,r)=>O(t)*Math.pow(Math.abs(t),r),p=class extends Error{constructor(t){super(t),this.name="ValidationError",this.message=t}},B=t=>{if(!t||t.length<6)throw new p("The blurhash string must be at least 6 characters");let r=d(t[0]),a=Math.floor(r/9)+1,l=r%9+1;if(t.length!==4+2*l*a)throw new p(`blurhash length mismatch: length is ${t.length} but it should be ${4+2*l*a}`)},R=t=>{try{B(t)}catch(r){return{result:!1,errorReason:r.message}}return{result:!0}},T=t=>{let r=t>>16,a=t>>8&255,l=t&255;return[c(r),c(a),c(l)]},U=(t,r)=>{let a=Math.floor(t/361),l=Math.floor(t/19)%19,o=t%19;return[w((a-9)/9,2)*r,w((l-9)/9,2)*r,w((o-9)/9,2)*r]},j=(t,r,a,l)=>{B(t),l=l|1;let o=d(t[0]),i=Math.floor(o/9)+1,u=o%9+1,m=(d(t[1])+1)/166,n=new Array(u*i);for(let e=0;e<n.length;e++)if(e===0){let h=d(t.substring(2,6));n[e]=T(h)}else{let h=d(t.substring(4+e*2,6+e*2));n[e]=U(h,m*l)}let s=r*4,M=new Uint8ClampedArray(s*a);for(let e=0;e<a;e++)for(let h=0;h<r;h++){let f=0,x=0,v=0;for(let y=0;y<i;y++)for(let E=0;E<u;E++){let P=Math.cos(Math.PI*h*E/r)*Math.cos(Math.PI*e*y/a),V=n[E+y*u];f+=V[0]*P,x+=V[1]*P,v+=V[2]*P}let I=g(f),C=g(x),H=g(v);M[4*h+0+e*s]=I,M[4*h+1+e*s]=C,M[4*h+2+e*s]=H,M[4*h+3+e*s]=255}return M},q=j,$=4,z=(t,r,a,l)=>{let 
o=0,i=0,u=0,m=r*$;for(let s=0;s<r;s++){let M=$*s;for(let e=0;e<a;e++){let h=M+e*m,f=l(s,e);o+=f*c(t[h]),i+=f*c(t[h+1]),u+=f*c(t[h+2])}}let n=1/(r*a);return[o*n,i*n,u*n]},D=t=>{let r=g(t[0]),a=g(t[1]),l=g(t[2]);return(r<<16)+(a<<8)+l},F=(t,r)=>{let a=Math.floor(Math.max(0,Math.min(18,Math.floor(w(t[0]/r,.5)*9+9.5)))),l=Math.floor(Math.max(0,Math.min(18,Math.floor(w(t[1]/r,.5)*9+9.5)))),o=Math.floor(Math.max(0,Math.min(18,Math.floor(w(t[2]/r,.5)*9+9.5))));return a*19*19+l*19+o},G=(t,r,a,l,o)=>{if(l<1||l>9||o<1||o>9)throw new p("BlurHash must have between 1 and 9 components");if(Math.floor(r*a*4)!==t.length)throw new p("Width and height must match the pixels array");let i=[];for(let e=0;e<o;e++)for(let h=0;h<l;h++){let f=h==0&&e==0?1:2,x=z(t,r,a,(v,I)=>f*Math.cos(Math.PI*h*v/r)*Math.cos(Math.PI*e*I/a));i.push(x)}let u=i[0],m=i.slice(1),n="",s=l-1+(o-1)*9;n+=b(s,1);let M;if(m.length>0){let e=Math.max(...m.map(f=>Math.max(...f))),h=Math.floor(Math.max(0,Math.min(82,Math.floor(e*166-.5))));M=(h+1)/166,n+=b(h,1)}else M=1,n+=b(0,1);return n+=b(D(u),4),m.forEach(e=>{n+=b(F(e,M),2)}),n},L=G;export{p as ValidationError,q as decode,L as encode,R as isBlurhashValid};
|
||||
3107
pnpm-lock.yaml
generated
3107
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
2
pnpm-workspace.yaml
Normal file
2
pnpm-workspace.yaml
Normal file
@@ -0,0 +1,2 @@
|
||||
patchedDependencies:
|
||||
blurhash: patches/blurhash.patch
|
||||
5
src/consumet.ts
Normal file
5
src/consumet.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { ANIME, META } from "@consumet/extensions";
|
||||
import fetchAdapter from "@haverstack/axios-fetch-adapter";
|
||||
|
||||
const gogoAnime = new ANIME.Gogoanime(undefined, undefined, fetchAdapter);
|
||||
export const aniList = new META.Anilist(gogoAnime, undefined, fetchAdapter);
|
||||
31
src/context.ts
Normal file
31
src/context.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import type { Context as HonoContext } from "hono";
|
||||
|
||||
export interface GraphQLContext {
|
||||
db: D1Database;
|
||||
deviceId?: string;
|
||||
aniListToken?: string;
|
||||
user: { id: number; name: string } | null;
|
||||
honoContext: HonoContext;
|
||||
}
|
||||
|
||||
export async function createGraphQLContext(
|
||||
c: HonoContext<Env>,
|
||||
): Promise<GraphQLContext> {
|
||||
const deviceId = c.req.header("X-Device-ID");
|
||||
const aniListToken = c.req.header("X-AniList-Token");
|
||||
const env = c.env as Env;
|
||||
|
||||
let user: GraphQLContext["user"] = null;
|
||||
if (aniListToken) {
|
||||
const stub = await env.ANILIST_DO.getByName("GLOBAL");
|
||||
user = await stub.getUser(aniListToken!);
|
||||
}
|
||||
|
||||
return {
|
||||
db: env.DB,
|
||||
deviceId,
|
||||
aniListToken,
|
||||
user,
|
||||
honoContext: c,
|
||||
};
|
||||
}
|
||||
@@ -1,142 +0,0 @@
|
||||
import { graphql } from "gql.tada";
|
||||
import { GraphQLClient } from "graphql-request";
|
||||
|
||||
import { sleep } from "~/libs/sleep";
|
||||
|
||||
const GetWatchingTitlesQuery = graphql(`
|
||||
query GetWatchingTitles($userName: String!, $page: Int!) {
|
||||
Page(page: $page, perPage: 50) {
|
||||
mediaList(
|
||||
userName: $userName
|
||||
type: ANIME
|
||||
sort: UPDATED_TIME_DESC
|
||||
status_in: [CURRENT, REPEATING, PLANNING]
|
||||
) {
|
||||
media {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
english
|
||||
userPreferred
|
||||
}
|
||||
description
|
||||
episodes
|
||||
genres
|
||||
status
|
||||
bannerImage
|
||||
averageScore
|
||||
coverImage {
|
||||
extraLarge
|
||||
large
|
||||
medium
|
||||
}
|
||||
countryOfOrigin
|
||||
mediaListEntry {
|
||||
id
|
||||
progress
|
||||
status
|
||||
updatedAt
|
||||
}
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
airingAt
|
||||
episode
|
||||
}
|
||||
}
|
||||
}
|
||||
pageInfo {
|
||||
currentPage
|
||||
hasNextPage
|
||||
perPage
|
||||
total
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export function getWatchingTitles(
|
||||
username: string,
|
||||
page: number,
|
||||
aniListToken: string,
|
||||
): Promise<GetWatchingTitles> {
|
||||
const client = new GraphQLClient("https://graphql.anilist.co/");
|
||||
|
||||
return client
|
||||
.request(
|
||||
GetWatchingTitlesQuery,
|
||||
{ userName: username, page },
|
||||
{ Authorization: `Bearer ${aniListToken}` },
|
||||
)
|
||||
.then((data) => data?.Page!)
|
||||
.catch((err) => {
|
||||
console.error("Failed to get watching titles");
|
||||
console.error(err);
|
||||
|
||||
const response = err.response;
|
||||
if (response.status === 429) {
|
||||
console.log("429, retrying in", response.headers.get("Retry-After"));
|
||||
return sleep(Number(response.headers.get("Retry-After")!) * 1000).then(
|
||||
() => getWatchingTitles(username, page, aniListToken),
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
type GetWatchingTitles = {
|
||||
mediaList:
|
||||
| ({
|
||||
media: {
|
||||
id: number;
|
||||
idMal: number | null;
|
||||
title: {
|
||||
english: string | null;
|
||||
userPreferred: string | null;
|
||||
} | null;
|
||||
description: string | null;
|
||||
episodes: number | null;
|
||||
genres: (string | null)[] | null;
|
||||
status:
|
||||
| "FINISHED"
|
||||
| "RELEASING"
|
||||
| "NOT_YET_RELEASED"
|
||||
| "CANCELLED"
|
||||
| "HIATUS"
|
||||
| null;
|
||||
bannerImage: string | null;
|
||||
averageScore: number | null;
|
||||
coverImage: {
|
||||
extraLarge: string | null;
|
||||
large: string | null;
|
||||
medium: string | null;
|
||||
} | null;
|
||||
countryOfOrigin: unknown;
|
||||
mediaListEntry: {
|
||||
id: number;
|
||||
progress: number | null;
|
||||
status:
|
||||
| "CURRENT"
|
||||
| "REPEATING"
|
||||
| "PLANNING"
|
||||
| "COMPLETED"
|
||||
| "DROPPED"
|
||||
| "PAUSED"
|
||||
| null;
|
||||
updatedAt: number;
|
||||
} | null;
|
||||
nextAiringEpisode: {
|
||||
timeUntilAiring: number;
|
||||
airingAt: number;
|
||||
episode: number;
|
||||
} | null;
|
||||
} | null;
|
||||
} | null)[]
|
||||
| null;
|
||||
pageInfo: {
|
||||
currentPage: number | null;
|
||||
hasNextPage: boolean | null;
|
||||
perPage: number | null;
|
||||
total: number | null;
|
||||
} | null;
|
||||
};
|
||||
@@ -1,214 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
import { streamSSE } from "hono/streaming";
|
||||
|
||||
import { fetchEpisodes } from "~/controllers/episodes/getByAniListId";
|
||||
import { maybeScheduleNextAiringEpisode } from "~/libs/maybeScheduleNextAiringEpisode";
|
||||
import { associateDeviceIdWithUsername } from "~/models/token";
|
||||
import { setWatchStatus } from "~/models/watchStatus";
|
||||
import { EpisodesResponseSchema } from "~/types/episode";
|
||||
import { ErrorResponse, ErrorResponseSchema } from "~/types/schema";
|
||||
import { Title } from "~/types/title";
|
||||
|
||||
import { getUser } from "./getUser";
|
||||
import { getWatchingTitles } from "./getWatchingTitles";
|
||||
|
||||
const UserSchema = z.object({
|
||||
name: z.string(),
|
||||
avatar: z.object({
|
||||
medium: z.string().nullable(),
|
||||
large: z.string(),
|
||||
}),
|
||||
statistics: z.object({
|
||||
minutesWatched: z.number().openapi({ type: "integer", format: "int64" }),
|
||||
episodesWatched: z.number().int(),
|
||||
count: z.number().int(),
|
||||
meanScore: z.number().openapi({ type: "number", format: "float" }),
|
||||
}),
|
||||
});
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "auth"],
|
||||
summary:
|
||||
"Authenticate with AniList and return all upcoming and 'currently watching' titles",
|
||||
operationId: "authenticateAniList",
|
||||
method: "get",
|
||||
path: "/",
|
||||
request: {
|
||||
headers: z.object({
|
||||
"x-anilist-token": z.string(),
|
||||
"x-aniplay-device-id": z.string(),
|
||||
}),
|
||||
// Uncomment when testing locally
|
||||
// headers: z.object({
|
||||
// "x-anilist-token":
|
||||
// process.env.NODE_ENV === "production"
|
||||
// ? z.string()
|
||||
// : z.string().optional(),
|
||||
// "x-aniplay-device-id":
|
||||
// process.env.NODE_ENV === "production"
|
||||
// ? z.string()
|
||||
// : z.string().optional(),
|
||||
// }),
|
||||
// query: z.object({
|
||||
// aniListToken: z.string().optional(),
|
||||
// deviceId: z.string().optional(),
|
||||
// }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"text/event-stream": {
|
||||
schema: z.union([
|
||||
z.object({ title: Title, episodes: EpisodesResponseSchema }),
|
||||
UserSchema,
|
||||
]),
|
||||
},
|
||||
},
|
||||
description: "Streams a list of titles",
|
||||
},
|
||||
401: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Failed to authenticate with AniList",
|
||||
},
|
||||
500: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Error fetching episodes",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const app = new OpenAPIHono<Cloudflare.Env>();
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const deviceId =
|
||||
c.req.header("X-Aniplay-Device-Id") ?? c.req.query("deviceId");
|
||||
const aniListToken =
|
||||
c.req.header("X-AniList-Token") ?? c.req.query("aniListToken");
|
||||
|
||||
if (!aniListToken) {
|
||||
return c.json(ErrorResponse, { status: 401 });
|
||||
}
|
||||
|
||||
let user: Awaited<ReturnType<typeof getUser>>;
|
||||
try {
|
||||
user = await getUser(aniListToken);
|
||||
if (!user) {
|
||||
return c.json(ErrorResponse, { status: 401 });
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to authenticate with AniList");
|
||||
console.error(error);
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
|
||||
try {
|
||||
await associateDeviceIdWithUsername(deviceId!, user.name!);
|
||||
} catch (error) {
|
||||
console.error("Failed to associate device");
|
||||
console.error(error);
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
|
||||
c.header("Content-Type", "text/x-unknown");
|
||||
c.header("content-encoding", "identity");
|
||||
c.header("transfer-encoding", "chunked");
|
||||
return streamSSE(
|
||||
c,
|
||||
async (stream) => {
|
||||
await stream.writeSSE({ event: "user", data: JSON.stringify(user) });
|
||||
|
||||
let currentPage = 1;
|
||||
let hasNextPage = true;
|
||||
|
||||
do {
|
||||
const stub = env.ANILIST_DO.getByName(user.name!);
|
||||
const { mediaList, pageInfo } = await stub
|
||||
.getTitles(
|
||||
user.name!,
|
||||
currentPage++,
|
||||
["CURRENT", "PLANNING", "PAUSED", "REPEATING"],
|
||||
aniListToken,
|
||||
)
|
||||
.then((data) => data!);
|
||||
if (!mediaList) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (!(pageInfo?.hasNextPage ?? false) && (pageInfo?.total ?? 0) > 0) {
|
||||
await stream.writeSSE({
|
||||
event: "count",
|
||||
data: pageInfo!.total.toString(),
|
||||
});
|
||||
}
|
||||
|
||||
for (const mediaObj of mediaList) {
|
||||
const media = mediaObj?.media;
|
||||
if (!media) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const mediaListEntry = media.mediaListEntry;
|
||||
if (mediaListEntry) {
|
||||
const { wasAdded } = await setWatchStatus(
|
||||
deviceId!,
|
||||
media.id,
|
||||
mediaListEntry.status,
|
||||
);
|
||||
if (wasAdded) {
|
||||
await maybeScheduleNextAiringEpisode(media.id);
|
||||
}
|
||||
}
|
||||
|
||||
const nextEpisode = media.nextAiringEpisode?.episode;
|
||||
if (
|
||||
nextEpisode === 0 ||
|
||||
nextEpisode === 1 ||
|
||||
media.status === "NOT_YET_RELEASED"
|
||||
) {
|
||||
await stream.writeSSE({
|
||||
event: "title",
|
||||
data: JSON.stringify({ title: media, episodes: [] }),
|
||||
id: media.id.toString(),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
await fetchEpisodes(media.id, true).then((episodes) => {
|
||||
if (episodes.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
return stream.writeSSE({
|
||||
event: "title",
|
||||
data: JSON.stringify({ title: media, episodes }),
|
||||
id: media.id.toString(),
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
hasNextPage = pageInfo?.hasNextPage ?? false;
|
||||
console.log(hasNextPage);
|
||||
} while (hasNextPage);
|
||||
|
||||
// send end event instead of closing the connection to let the client know that the stream didn't end abruptly
|
||||
await stream.writeSSE({ event: "end", data: "end" });
|
||||
console.log("completed");
|
||||
},
|
||||
async (err, stream) => {
|
||||
console.error("Error occurred in SSE");
|
||||
console.error(err);
|
||||
await stream.writeln("An error occurred");
|
||||
await stream.close();
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,10 +0,0 @@
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
app.route(
|
||||
"/anilist",
|
||||
await import("./anilist").then((controller) => controller.default),
|
||||
);
|
||||
|
||||
export default app;
|
||||
@@ -1,74 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
|
||||
import { EpisodesResponseSchema } from "~/types/episode";
|
||||
import {
|
||||
AniListIdQuerySchema,
|
||||
ErrorResponse,
|
||||
ErrorResponseSchema,
|
||||
} from "~/types/schema";
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "episodes"],
|
||||
summary: "Fetch episodes for a title",
|
||||
operationId: "fetchEpisodes",
|
||||
method: "get",
|
||||
path: "/{aniListId}",
|
||||
request: {
|
||||
params: z.object({ aniListId: AniListIdQuerySchema }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: EpisodesResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Returns a list of episodes",
|
||||
},
|
||||
500: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Error fetching episodes",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const app = new OpenAPIHono<Cloudflare.Env>();
|
||||
|
||||
export function fetchEpisodes(aniListId: number, shouldRetry: boolean = false) {
|
||||
return import("./aniwatch")
|
||||
.then(({ getEpisodesFromAniwatch }) =>
|
||||
getEpisodesFromAniwatch(aniListId, shouldRetry),
|
||||
)
|
||||
.then((episodeResults) => episodeResults?.episodes ?? []);
|
||||
}
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const aniListId = Number(c.req.param("aniListId"));
|
||||
|
||||
// Check if we should use mock data
|
||||
const { useMockData } = await import("~/libs/useMockData");
|
||||
if (useMockData()) {
|
||||
const { mockEpisodes } = await import("~/mocks");
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
result: { providerId: "aniwatch", episodes: mockEpisodes() },
|
||||
});
|
||||
}
|
||||
|
||||
const episodes = await fetchEpisodes(aniListId);
|
||||
if (episodes.length === 0) {
|
||||
return c.json(ErrorResponse, { status: 404 });
|
||||
}
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
result: { providerId: "aniwatch", episodes },
|
||||
});
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,95 +0,0 @@
|
||||
import { env } from "cloudflare:test";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
// Mock useMockData
|
||||
vi.mock("~/libs/useMockData", () => ({ useMockData: () => false }));
|
||||
|
||||
describe('requests the "/episodes/:id/url" route', () => {
|
||||
let app: typeof import("../../../src/index").app;
|
||||
let fetchEpisodes: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.resetModules();
|
||||
|
||||
vi.doMock("../getByAniListId", async (importOriginal) => {
|
||||
const actual = await importOriginal<any>();
|
||||
return {
|
||||
...actual,
|
||||
fetchEpisodes: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
// Mock aniwatch initially as empty mock
|
||||
vi.doMock("./aniwatch", () => ({ getSourcesFromAniwatch: vi.fn() }));
|
||||
|
||||
app = (await import("~/index")).app;
|
||||
fetchEpisodes = (await import("../getByAniListId")).fetchEpisodes;
|
||||
});
|
||||
|
||||
it("with sources from Aniwatch", async () => {
|
||||
vi.mocked(fetchEpisodes).mockResolvedValue([{ id: "ep1", number: 1 }]);
|
||||
|
||||
const mockSource = {
|
||||
source:
|
||||
"https://www032.vipanicdn.net/streamhls/aa804a2400535d84dd59454b28d329fb/ep.1.1712504065.m3u8",
|
||||
subtitles: [],
|
||||
audio: [],
|
||||
};
|
||||
|
||||
// Since controller uses dynamic import, doMock SHOULD affect it if we set it up before the call
|
||||
// Wait, doMock inside test block might be tricky if we don't re-import the module using it?
|
||||
// BUT the controller uses `import("./aniwatch")`, causing a fresh import (if cache invalid?)
|
||||
// Or if `vi.doMock` updates the registry.
|
||||
// In Vitest, doMock updates the registry for NEXT imports.
|
||||
// So `import("./aniwatch")` should pick it up.
|
||||
|
||||
vi.doMock("./aniwatch", () => ({
|
||||
getSourcesFromAniwatch: vi.fn().mockResolvedValue(mockSource),
|
||||
}));
|
||||
|
||||
const response = await app.request(
|
||||
"/episodes/4/url",
|
||||
{
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
episodeNumber: 1,
|
||||
}),
|
||||
headers: { "Content-Type": "application/json" },
|
||||
},
|
||||
env,
|
||||
);
|
||||
|
||||
const json = await response.json();
|
||||
expect(json).toEqual({
|
||||
success: true,
|
||||
result: mockSource,
|
||||
});
|
||||
});
|
||||
|
||||
it("with no URL from Aniwatch source", async () => {
|
||||
vi.mocked(fetchEpisodes).mockResolvedValue([{ id: "ep1", number: 1 }]);
|
||||
|
||||
// Make mock return null
|
||||
vi.doMock("./aniwatch", () => ({
|
||||
getSourcesFromAniwatch: vi.fn().mockResolvedValue(null),
|
||||
}));
|
||||
|
||||
const response = await app.request(
|
||||
"/episodes/4/url",
|
||||
{
|
||||
method: "POST",
|
||||
body: JSON.stringify({
|
||||
episodeNumber: 1, // Exists in episodes, but source returns null
|
||||
}),
|
||||
headers: { "Content-Type": "application/json" },
|
||||
},
|
||||
env,
|
||||
);
|
||||
|
||||
const json = await response.json();
|
||||
expect(json).toEqual({
|
||||
success: false,
|
||||
});
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
});
|
||||
@@ -1,146 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
|
||||
import { FetchUrlResponse } from "~/types/episode/fetch-url-response";
|
||||
import {
|
||||
AniListIdQuerySchema,
|
||||
EpisodeNumberSchema,
|
||||
ErrorResponse,
|
||||
ErrorResponseSchema,
|
||||
} from "~/types/schema";
|
||||
|
||||
import { fetchEpisodes } from "../getByAniListId";
|
||||
|
||||
const FetchUrlRequest = z.object({ episodeNumber: EpisodeNumberSchema });
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "episodes"],
|
||||
summary: "Fetch stream URL for an episode",
|
||||
operationId: "fetchStreamUrl",
|
||||
method: "post",
|
||||
path: "/{aniListId}/url",
|
||||
request: {
|
||||
params: z.object({ aniListId: AniListIdQuerySchema }),
|
||||
body: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: FetchUrlRequest,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: FetchUrlResponse,
|
||||
},
|
||||
},
|
||||
description: "Returns a stream URL",
|
||||
},
|
||||
400: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Unknown provider",
|
||||
},
|
||||
404: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Provider did not return a source",
|
||||
},
|
||||
500: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Failed to fetch stream URL from provider",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const app = new OpenAPIHono<Cloudflare.Env>();
|
||||
|
||||
export async function fetchEpisodeUrl({
|
||||
id,
|
||||
aniListId,
|
||||
episodeNumber,
|
||||
}:
|
||||
| { id: string; aniListId?: number; episodeNumber?: number }
|
||||
| {
|
||||
id?: string;
|
||||
aniListId: number;
|
||||
episodeNumber: number;
|
||||
}): Promise<FetchUrlResponse | null> {
|
||||
try {
|
||||
let episodeId = id;
|
||||
if (!id) {
|
||||
const episodes = await fetchEpisodes(aniListId!);
|
||||
if (episodes.length === 0) {
|
||||
console.error(`Failed to fetch episodes for title ${aniListId}`);
|
||||
return null;
|
||||
}
|
||||
const episode = episodes.find(
|
||||
(episode) => episode.number === episodeNumber,
|
||||
);
|
||||
if (!episode) {
|
||||
console.error(
|
||||
`Episode ${episodeNumber} not found for title ${aniListId}`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
episodeId = episode.id;
|
||||
}
|
||||
|
||||
const result = await import("./aniwatch").then(
|
||||
({ getSourcesFromAniwatch }) => getSourcesFromAniwatch(episodeId!),
|
||||
);
|
||||
if (!result) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (e) {
|
||||
console.error("Failed to fetch download URL from Aniwatch", e);
|
||||
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const aniListId = Number(c.req.param("aniListId"));
|
||||
const { episodeNumber } = await c.req.json<typeof FetchUrlRequest._type>();
|
||||
if (episodeNumber == undefined) {
|
||||
return c.json(ErrorResponse, { status: 400 });
|
||||
}
|
||||
|
||||
// Check if we should use mock data
|
||||
const { useMockData } = await import("~/libs/useMockData");
|
||||
if (useMockData()) {
|
||||
const { mockEpisodeUrl } = await import("~/mocks");
|
||||
|
||||
return c.json({ success: true, result: mockEpisodeUrl });
|
||||
}
|
||||
|
||||
try {
|
||||
console.log(
|
||||
`Fetching episode URL for aniListId: ${aniListId}, episodeNumber: ${episodeNumber}`,
|
||||
);
|
||||
const fetchUrlResult = await fetchEpisodeUrl({ aniListId, episodeNumber });
|
||||
if (!fetchUrlResult) {
|
||||
return c.json(ErrorResponse, { status: 404 });
|
||||
}
|
||||
|
||||
return c.json({ success: true, result: fetchUrlResult });
|
||||
} catch (error) {
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,20 +0,0 @@
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
app.route(
|
||||
"/",
|
||||
await import("./getByAniListId").then((controller) => controller.default),
|
||||
);
|
||||
app.route(
|
||||
"/",
|
||||
await import("./getEpisodeUrl").then((controller) => controller.default),
|
||||
);
|
||||
app.route(
|
||||
"/",
|
||||
await import("./markEpisodeAsWatched").then(
|
||||
(controller) => controller.default,
|
||||
),
|
||||
);
|
||||
|
||||
export default app;
|
||||
@@ -1,103 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
import { env } from "hono/adapter";
|
||||
|
||||
import { updateWatchStatus } from "~/controllers/watch-status";
|
||||
import {
|
||||
AniListIdQuerySchema,
|
||||
EpisodeNumberSchema,
|
||||
ErrorResponse,
|
||||
ErrorResponseSchema,
|
||||
SuccessResponseSchema,
|
||||
} from "~/types/schema";
|
||||
import { User } from "~/types/user";
|
||||
|
||||
import { markEpisodeAsWatched } from "./anilist";
|
||||
|
||||
const MarkEpisodeAsWatchedRequest = z.object({
|
||||
episodeNumber: EpisodeNumberSchema,
|
||||
isComplete: z.boolean(),
|
||||
});
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "episodes"],
|
||||
summary: "Mark episode as watched",
|
||||
operationId: "markEpisodeAsWatched",
|
||||
method: "post",
|
||||
path: "/{aniListId}/watched",
|
||||
request: {
|
||||
params: z.object({ aniListId: AniListIdQuerySchema }),
|
||||
body: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: MarkEpisodeAsWatchedRequest,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: SuccessResponseSchema(User),
|
||||
},
|
||||
},
|
||||
description: "Returns whether the episode was marked as watched",
|
||||
},
|
||||
401: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Unauthorized to mark the episode as watched",
|
||||
},
|
||||
500: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Error marking episode as watched",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const app = new OpenAPIHono<Cloudflare.Env>();
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const aniListToken = c.req.header("X-AniList-Token");
|
||||
|
||||
if (!aniListToken) {
|
||||
return c.json(ErrorResponse, { status: 401 });
|
||||
}
|
||||
|
||||
const deviceId = c.req.header("X-Aniplay-Device-Id")!;
|
||||
const aniListId = Number(c.req.param("aniListId"));
|
||||
const { episodeNumber, isComplete } =
|
||||
await c.req.json<typeof MarkEpisodeAsWatchedRequest._type>();
|
||||
|
||||
try {
|
||||
const user = await markEpisodeAsWatched(
|
||||
aniListToken,
|
||||
aniListId,
|
||||
episodeNumber,
|
||||
isComplete,
|
||||
);
|
||||
if (isComplete) {
|
||||
await updateWatchStatus(deviceId, aniListId, "COMPLETED");
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
console.error("Failed to mark episode as watched - user not found?");
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
|
||||
return c.json({ success: true, result: user }, 200);
|
||||
} catch (error) {
|
||||
console.error("Failed to mark episode as watched");
|
||||
console.error(error);
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,11 +0,0 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { app } from "~/index";
|
||||
|
||||
describe("Health Check", () => {
|
||||
it("should return { success: true }", async () => {
|
||||
const res = await app.request("/");
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: true });
|
||||
});
|
||||
});
|
||||
@@ -1,9 +0,0 @@
|
||||
import { Hono } from "hono";
|
||||
|
||||
import { SuccessResponse } from "~/types/schema";
|
||||
|
||||
const app = new Hono();
|
||||
|
||||
app.get("/", (c) => c.json(SuccessResponse, 200));
|
||||
|
||||
export default app;
|
||||
@@ -1,14 +0,0 @@
|
||||
import { Hono } from "hono";
|
||||
|
||||
const app = new Hono();
|
||||
|
||||
app.route(
|
||||
"/new-episode",
|
||||
await import("./new-episode").then((controller) => controller.default),
|
||||
);
|
||||
app.route(
|
||||
"/upcoming-titles",
|
||||
await import("./upcoming-titles").then((controller) => controller.default),
|
||||
);
|
||||
|
||||
export default app;
|
||||
@@ -1,75 +0,0 @@
|
||||
import { env } from "cloudflare:workers";
|
||||
import type { HonoRequest } from "hono";
|
||||
import { DateTime } from "luxon";
|
||||
|
||||
import { maybeScheduleNextAiringEpisode } from "~/libs/maybeScheduleNextAiringEpisode";
|
||||
import { getValue, setValue } from "~/models/kv";
|
||||
import { filterUnreleasedTitles } from "~/models/unreleasedTitles";
|
||||
import type { Title } from "~/types/title";
|
||||
|
||||
type AiringSchedule = {
|
||||
media: Title;
|
||||
episode: number;
|
||||
timeUntilAiring: number;
|
||||
airingAt: number;
|
||||
id: number;
|
||||
};
|
||||
|
||||
export async function getUpcomingTitlesFromAnilist() {
|
||||
const durableObjectId = env.ANILIST_DO.idFromName("GLOBAL");
|
||||
const stub = env.ANILIST_DO.get(durableObjectId);
|
||||
|
||||
const lastCheckedScheduleAt = await getValue("schedule_last_checked_at").then(
|
||||
(value) => (value ? Number(value) : DateTime.now().toUnixInteger()),
|
||||
);
|
||||
const twoDaysFromNow = DateTime.now().plus({ days: 2 }).toUnixInteger();
|
||||
|
||||
let currentPage = 1;
|
||||
let plannedToWatchTitles = new Set<number>();
|
||||
let scheduleList: AiringSchedule[] = [];
|
||||
let shouldContinue = true;
|
||||
|
||||
do {
|
||||
const Page = await stub.getUpcomingTitles(
|
||||
currentPage++,
|
||||
lastCheckedScheduleAt,
|
||||
twoDaysFromNow,
|
||||
);
|
||||
|
||||
if (!Page) break;
|
||||
|
||||
const { airingSchedules, pageInfo } = Page;
|
||||
plannedToWatchTitles = plannedToWatchTitles.union(
|
||||
await filterUnreleasedTitles(
|
||||
airingSchedules!.map((schedule: any) => schedule!.media?.id!),
|
||||
),
|
||||
);
|
||||
scheduleList = scheduleList.concat(
|
||||
airingSchedules!.filter(
|
||||
(schedule: any): schedule is AiringSchedule =>
|
||||
!!schedule &&
|
||||
!plannedToWatchTitles.has(schedule.media?.id) &&
|
||||
schedule.media?.countryOfOrigin === "JP" &&
|
||||
schedule.episode == 1,
|
||||
),
|
||||
);
|
||||
shouldContinue = pageInfo?.hasNextPage ?? false;
|
||||
} while (shouldContinue);
|
||||
|
||||
await Promise.all(
|
||||
Array.from(plannedToWatchTitles).map((titleId) =>
|
||||
maybeScheduleNextAiringEpisode(titleId),
|
||||
),
|
||||
);
|
||||
|
||||
if (scheduleList.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
await setValue(
|
||||
"schedule_last_checked_at",
|
||||
scheduleList[scheduleList.length - 1].airingAt.toString(),
|
||||
);
|
||||
|
||||
return scheduleList;
|
||||
}
|
||||
@@ -1,55 +0,0 @@
|
||||
import { Hono } from "hono";
|
||||
import { DateTime } from "luxon";
|
||||
|
||||
import { getAdminSdkCredentials } from "~/libs/gcloud/getAdminSdkCredentials";
|
||||
import { sendFcmMessage } from "~/libs/gcloud/sendFcmMessage";
|
||||
import { SuccessResponse } from "~/types/schema";
|
||||
|
||||
import { getUpcomingTitlesFromAnilist } from "./anilist";
|
||||
|
||||
const app = new Hono();
|
||||
|
||||
export async function checkUpcomingTitles() {
|
||||
const titles = await getUpcomingTitlesFromAnilist();
|
||||
|
||||
await Promise.allSettled(
|
||||
titles.map(async (title) => {
|
||||
const titleName =
|
||||
title.media.title?.userPreferred ??
|
||||
title.media.title?.english ??
|
||||
"Unknown Title";
|
||||
|
||||
return sendFcmMessage(getAdminSdkCredentials(), {
|
||||
topic: "newTitles",
|
||||
data: {
|
||||
type: "new_title",
|
||||
aniListId: title.media.id.toString(),
|
||||
title: titleName,
|
||||
airingAt: title.airingAt.toString(),
|
||||
},
|
||||
notification: {
|
||||
title: "New Series Alert",
|
||||
body: `${titleName} will be released ${DateTime.fromSeconds(title.airingAt).toRelative({ unit: ["hours", "minutes"] })}`,
|
||||
image:
|
||||
title.media.coverImage?.medium ??
|
||||
title.media.coverImage?.large ??
|
||||
title.media.coverImage?.extraLarge ??
|
||||
undefined,
|
||||
},
|
||||
android: {
|
||||
notification: {
|
||||
click_action: "HANDLE_FCM_NOTIFICATION",
|
||||
},
|
||||
},
|
||||
});
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
app.post("/", async (c) => {
|
||||
await checkUpcomingTitles();
|
||||
|
||||
return c.json(SuccessResponse, 200);
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,56 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
|
||||
import { ErrorResponse, SuccessResponseSchema } from "~/types/schema";
|
||||
import { HomeTitle } from "~/types/title/homeTitle";
|
||||
|
||||
import { fetchPopularTitlesFromAnilist } from "./anilist";
|
||||
|
||||
const BrowsePopularResponse = SuccessResponseSchema(
|
||||
z.object({
|
||||
trending: z.array(HomeTitle),
|
||||
popular: z.array(HomeTitle),
|
||||
upcoming: z.array(HomeTitle).optional(),
|
||||
}),
|
||||
);
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "title"],
|
||||
operationId: "browsePopularTitles",
|
||||
summary: "Get a preview of popular titles",
|
||||
method: "get",
|
||||
path: "/",
|
||||
request: {
|
||||
query: z.object({
|
||||
limit: z
|
||||
.number({ coerce: true })
|
||||
.int()
|
||||
.default(10)
|
||||
.describe("The number of titles to return"),
|
||||
}),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: BrowsePopularResponse,
|
||||
},
|
||||
},
|
||||
description: "Returns an object containing a preview of popular titles",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const limit = Number(c.req.query("limit") ?? 10);
|
||||
|
||||
const response = await fetchPopularTitlesFromAnilist(limit);
|
||||
if (!response) {
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
|
||||
return c.json({ success: true, result: response });
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,67 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
|
||||
import {
|
||||
ErrorResponse,
|
||||
PaginatedResponseSchema,
|
||||
SuccessResponseSchema,
|
||||
} from "~/types/schema";
|
||||
import { HomeTitle } from "~/types/title/homeTitle";
|
||||
|
||||
import { fetchPopularTitlesFromAnilist } from "./anilist";
|
||||
import { PopularCategory } from "./enum";
|
||||
|
||||
const BrowsePopularResponse = PaginatedResponseSchema(HomeTitle);
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "title"],
|
||||
operationId: "browsePopularTitlesWithCategory",
|
||||
summary: "Get a preview of popular titles for a category",
|
||||
method: "get",
|
||||
path: "/{category}",
|
||||
request: {
|
||||
query: z.object({
|
||||
limit: z
|
||||
.number({ coerce: true })
|
||||
.int()
|
||||
.default(10)
|
||||
.describe("The number of titles to return"),
|
||||
page: z.number({ coerce: true }).int().min(1).default(1),
|
||||
}),
|
||||
params: z.object({ category: PopularCategory }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: BrowsePopularResponse,
|
||||
},
|
||||
},
|
||||
description: "Returns an object containing a preview of popular titles",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const page = Number(c.req.query("page") ?? 1);
|
||||
const limit = Number(c.req.query("limit") ?? 10);
|
||||
const popularCategory = c.req.param("category") as PopularCategory;
|
||||
|
||||
const response = await fetchPopularTitlesFromAnilist(
|
||||
popularCategory,
|
||||
page,
|
||||
limit,
|
||||
);
|
||||
if (!response) {
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
|
||||
return c.json({
|
||||
success: true,
|
||||
results: response.results,
|
||||
hasNextPage: response.hasNextPage ?? false,
|
||||
});
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,15 +0,0 @@
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
app.route(
|
||||
"/browse",
|
||||
await import("./browse").then((controller) => controller.default),
|
||||
);
|
||||
|
||||
app.route(
|
||||
"/",
|
||||
await import("./category").then((controller) => controller.default),
|
||||
);
|
||||
|
||||
export default app;
|
||||
@@ -1,22 +0,0 @@
|
||||
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
|
||||
|
||||
exports[`requests the "/search" route > valid query that returns anilist results 1`] = `
|
||||
{
|
||||
"hasNextPage": false,
|
||||
"results": [
|
||||
{
|
||||
"coverImage": {
|
||||
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx151807-yxY3olrjZH4k.png",
|
||||
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx151807-yxY3olrjZH4k.png",
|
||||
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx151807-yxY3olrjZH4k.png",
|
||||
},
|
||||
"id": 151807,
|
||||
"title": {
|
||||
"english": "Solo Leveling",
|
||||
"userPreferred": "Ore dake Level Up na Ken",
|
||||
},
|
||||
},
|
||||
],
|
||||
"success": true,
|
||||
}
|
||||
`;
|
||||
@@ -1,78 +0,0 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
describe('requests the "/search" route', () => {
|
||||
let app: typeof import("~/index").app;
|
||||
let fetchFromMultipleSources: typeof import("~/libs/fetchFromMultipleSources").fetchFromMultipleSources;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.resetModules();
|
||||
|
||||
// Mock useMockData
|
||||
vi.doMock("~/libs/useMockData", () => ({
|
||||
useMockData: () => false,
|
||||
}));
|
||||
|
||||
// Mock fetchFromMultipleSources
|
||||
vi.doMock("~/libs/fetchFromMultipleSources", () => ({
|
||||
fetchFromMultipleSources: vi.fn(),
|
||||
}));
|
||||
|
||||
const indexModule = await import("~/index");
|
||||
app = indexModule.app;
|
||||
|
||||
const fetchModule = await import("~/libs/fetchFromMultipleSources");
|
||||
fetchFromMultipleSources = fetchModule.fetchFromMultipleSources;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.doUnmock("~/libs/fetchFromMultipleSources");
|
||||
vi.doUnmock("~/libs/useMockData");
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it("valid query that returns anilist results", async () => {
|
||||
vi.mocked(fetchFromMultipleSources).mockResolvedValue({
|
||||
result: {
|
||||
results: [
|
||||
{
|
||||
id: 151807,
|
||||
title: {
|
||||
userPreferred: "Ore dake Level Up na Ken",
|
||||
english: "Solo Leveling",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx151807-yxY3olrjZH4k.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx151807-yxY3olrjZH4k.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx151807-yxY3olrjZH4k.png",
|
||||
},
|
||||
},
|
||||
],
|
||||
hasNextPage: false,
|
||||
},
|
||||
errorOccurred: false,
|
||||
});
|
||||
|
||||
const response = await app.request("/search?query=search query");
|
||||
|
||||
expect(await response.json()).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it("query that returns no results", async () => {
|
||||
vi.mocked(fetchFromMultipleSources).mockResolvedValue({
|
||||
result: null,
|
||||
errorOccurred: false,
|
||||
});
|
||||
|
||||
const response = await app.request("/search?query=a");
|
||||
|
||||
expect(await response.json()).toEqual({
|
||||
success: true,
|
||||
results: [],
|
||||
hasNextPage: false,
|
||||
});
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
});
|
||||
@@ -1,84 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
|
||||
import { fetchFromMultipleSources } from "~/libs/fetchFromMultipleSources";
|
||||
import { PaginatedResponseSchema } from "~/types/schema";
|
||||
import { HomeTitle } from "~/types/title/homeTitle";
|
||||
|
||||
import { fetchSearchResultsFromAnilist } from "./anilist";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "title"],
|
||||
operationId: "search",
|
||||
summary: "Search for a title",
|
||||
method: "get",
|
||||
path: "/",
|
||||
request: {
|
||||
query: z.object({
|
||||
query: z.string(),
|
||||
page: z.number({ coerce: true }).int().min(1).default(1),
|
||||
limit: z.number({ coerce: true }).int().default(10),
|
||||
}),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: PaginatedResponseSchema(HomeTitle),
|
||||
},
|
||||
},
|
||||
description: "Returns a list of paginated results for the query",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const query = c.req.query("query") ?? "";
|
||||
const page = Number(c.req.query("page") ?? 1);
|
||||
const limit = Number(c.req.query("limit") ?? 10);
|
||||
|
||||
// Check if we should use mock data
|
||||
const { useMockData } = await import("~/libs/useMockData");
|
||||
if (useMockData()) {
|
||||
const { mockSearchResults } = await import("~/mocks");
|
||||
|
||||
// Paginate mock results
|
||||
const startIndex = (page - 1) * limit;
|
||||
const endIndex = startIndex + limit;
|
||||
const paginatedResults = mockSearchResults.slice(startIndex, endIndex);
|
||||
const hasNextPage = endIndex < mockSearchResults.length;
|
||||
|
||||
return c.json(
|
||||
{
|
||||
success: true,
|
||||
results: paginatedResults,
|
||||
hasNextPage,
|
||||
},
|
||||
200,
|
||||
);
|
||||
}
|
||||
|
||||
const { result: response, errorOccurred } = await fetchFromMultipleSources([
|
||||
() => fetchSearchResultsFromAnilist(query, page, limit),
|
||||
]);
|
||||
|
||||
if (!response) {
|
||||
return c.json({
|
||||
success: !errorOccurred,
|
||||
results: [],
|
||||
hasNextPage: false,
|
||||
});
|
||||
}
|
||||
|
||||
return c.json(
|
||||
{
|
||||
success: true,
|
||||
results: response.results,
|
||||
hasNextPage: response.hasNextPage ?? false,
|
||||
},
|
||||
200,
|
||||
);
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,41 +0,0 @@
|
||||
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
|
||||
|
||||
exports[`requests the "/title" route > with a valid id & token 1`] = `
|
||||
{
|
||||
"result": {
|
||||
"bannerImage": "https://example.com/banner.png",
|
||||
"coverImage": {
|
||||
"extraLarge": "https://example.com/cover.png",
|
||||
"large": "https://example.com/cover.png",
|
||||
"medium": "https://example.com/cover.png",
|
||||
},
|
||||
"description": "Test Description",
|
||||
"id": 10,
|
||||
"title": {
|
||||
"english": "Test Title English",
|
||||
"userPreferred": "Test Title",
|
||||
},
|
||||
},
|
||||
"success": true,
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`requests the "/title" route > with a valid id but no token 1`] = `
|
||||
{
|
||||
"result": {
|
||||
"bannerImage": "https://example.com/banner.png",
|
||||
"coverImage": {
|
||||
"extraLarge": "https://example.com/cover.png",
|
||||
"large": "https://example.com/cover.png",
|
||||
"medium": "https://example.com/cover.png",
|
||||
},
|
||||
"description": "Test Description",
|
||||
"id": 10,
|
||||
"title": {
|
||||
"english": "Test Title English",
|
||||
"userPreferred": "Test Title",
|
||||
},
|
||||
},
|
||||
"success": true,
|
||||
}
|
||||
`;
|
||||
@@ -1,81 +0,0 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
describe('requests the "/title" route', () => {
|
||||
let app: typeof import("~/index").app;
|
||||
let fetchFromMultipleSources: typeof import("~/libs/fetchFromMultipleSources").fetchFromMultipleSources;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.resetModules();
|
||||
|
||||
vi.doMock("~/libs/useMockData", () => ({
|
||||
useMockData: () => false,
|
||||
}));
|
||||
|
||||
vi.doMock("~/libs/fetchFromMultipleSources", () => ({
|
||||
fetchFromMultipleSources: vi.fn(),
|
||||
}));
|
||||
|
||||
app = (await import("~/index")).app;
|
||||
fetchFromMultipleSources = (await import("~/libs/fetchFromMultipleSources"))
|
||||
.fetchFromMultipleSources;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.doUnmock("~/libs/fetchFromMultipleSources");
|
||||
vi.doUnmock("~/libs/useMockData");
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
const mockTitleFn = (id: number) => ({
|
||||
id,
|
||||
title: {
|
||||
userPreferred: "Test Title",
|
||||
english: "Test Title English",
|
||||
},
|
||||
description: "Test Description",
|
||||
coverImage: {
|
||||
extraLarge: "https://example.com/cover.png",
|
||||
large: "https://example.com/cover.png",
|
||||
medium: "https://example.com/cover.png",
|
||||
},
|
||||
bannerImage: "https://example.com/banner.png",
|
||||
});
|
||||
|
||||
it("with a valid id & token", async () => {
|
||||
vi.mocked(fetchFromMultipleSources).mockResolvedValue({
|
||||
result: mockTitleFn(10) as any,
|
||||
errorOccurred: false,
|
||||
});
|
||||
|
||||
const response = await app.request("/title?id=10", {
|
||||
headers: new Headers({ "x-anilist-token": "asd" }),
|
||||
});
|
||||
|
||||
await expect(response.json()).resolves.toMatchSnapshot();
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
it("with a valid id but no token", async () => {
|
||||
vi.mocked(fetchFromMultipleSources).mockResolvedValue({
|
||||
result: mockTitleFn(10) as any,
|
||||
errorOccurred: false,
|
||||
});
|
||||
|
||||
const response = await app.request("/title?id=10");
|
||||
|
||||
await expect(response.json()).resolves.toMatchSnapshot();
|
||||
expect(response.status).toBe(200);
|
||||
});
|
||||
|
||||
it("with an unknown title from all sources", async () => {
|
||||
vi.mocked(fetchFromMultipleSources).mockResolvedValue({
|
||||
result: null,
|
||||
errorOccurred: false,
|
||||
});
|
||||
|
||||
const response = await app.request("/title?id=-1");
|
||||
|
||||
await expect(response.json()).resolves.toEqual({ success: false });
|
||||
expect(response.status).toBe(404);
|
||||
});
|
||||
});
|
||||
@@ -1,81 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
|
||||
import { fetchTitleFromAnilist } from "~/libs/anilist/getTitle";
|
||||
import { fetchFromMultipleSources } from "~/libs/fetchFromMultipleSources";
|
||||
import { userProfileMiddleware } from "~/middleware/userProfile";
|
||||
import {
|
||||
AniListIdQuerySchema,
|
||||
ErrorResponse,
|
||||
ErrorResponseSchema,
|
||||
SuccessResponseSchema,
|
||||
} from "~/types/schema";
|
||||
import { Title } from "~/types/title";
|
||||
import type { User } from "~/types/user";
|
||||
|
||||
const app = new OpenAPIHono();
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "title"],
|
||||
operationId: "fetchTitle",
|
||||
summary: "Fetch title information",
|
||||
method: "get",
|
||||
path: "/",
|
||||
request: {
|
||||
query: z.object({ id: AniListIdQuerySchema }),
|
||||
headers: z.object({ "x-anilist-token": z.string().nullish() }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: SuccessResponseSchema(Title),
|
||||
},
|
||||
},
|
||||
description: "Returns title information",
|
||||
},
|
||||
"404": {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Title could not be found",
|
||||
},
|
||||
},
|
||||
middleware: [userProfileMiddleware],
|
||||
});
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const aniListId = Number(c.req.query("id"));
|
||||
const aniListToken = c.req.header("X-AniList-Token");
|
||||
|
||||
// Check if we should use mock data
|
||||
const { useMockData } = await import("~/libs/useMockData");
|
||||
if (useMockData()) {
|
||||
const { mockTitleDetails } = await import("~/mocks");
|
||||
|
||||
return c.json({ success: true, result: mockTitleDetails() }, 200);
|
||||
}
|
||||
|
||||
const { result: title, errorOccurred } = await fetchFromMultipleSources([
|
||||
() =>
|
||||
fetchTitleFromAnilist(
|
||||
aniListId,
|
||||
(c.get("user") as User)?.id,
|
||||
aniListToken ?? undefined,
|
||||
),
|
||||
]);
|
||||
|
||||
if (errorOccurred) {
|
||||
console.error(`Failed to fetch title ${aniListId}`);
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
|
||||
if (!title) {
|
||||
return c.json(ErrorResponse, 404);
|
||||
}
|
||||
|
||||
return c.json({ success: true, result: title }, 200);
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,187 +0,0 @@
|
||||
import { env } from "cloudflare:test";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { DateTime } from "luxon";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { getTestDb } from "~/libs/test/getTestDb";
|
||||
import { resetTestDb } from "~/libs/test/resetTestDb";
|
||||
import { deviceTokensTable } from "~/models/schema";
|
||||
|
||||
describe("requests the /token route", () => {
|
||||
const db = getTestDb(env);
|
||||
let app: typeof import("../../../src/index").app;
|
||||
let verifyFcmToken: typeof import("~/libs/gcloud/verifyFcmToken").verifyFcmToken;
|
||||
|
||||
beforeEach(async () => {
|
||||
await resetTestDb(db);
|
||||
vi.resetModules();
|
||||
|
||||
vi.doMock("~/libs/gcloud/verifyFcmToken", () => ({
|
||||
verifyFcmToken: vi.fn().mockResolvedValue(true),
|
||||
}));
|
||||
|
||||
vi.doMock("~/models/db", () => ({
|
||||
getDb: () => db,
|
||||
}));
|
||||
|
||||
// Re-import app and verified function to ensure mocks are applied
|
||||
app = (await import("~/index")).app;
|
||||
verifyFcmToken = (await import("~/libs/gcloud/verifyFcmToken"))
|
||||
.verifyFcmToken;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.doUnmock("~/libs/gcloud/verifyFcmToken");
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it("should succeed", async () => {
|
||||
const res = await app.request("/token", {
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({ token: "123", deviceId: "123" }),
|
||||
});
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: true });
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
|
||||
it("succeeded, db should contain entry", async () => {
|
||||
const minimumTimestamp = DateTime.now();
|
||||
await app.request("/token", {
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({ token: "123", deviceId: "123" }),
|
||||
});
|
||||
|
||||
const row = await db
|
||||
.select()
|
||||
.from(deviceTokensTable)
|
||||
.where(eq(deviceTokensTable.deviceId, "123"))
|
||||
.get();
|
||||
|
||||
expect(row).toEqual({
|
||||
deviceId: "123",
|
||||
token: "123",
|
||||
username: null,
|
||||
lastConnectedAt: expect.any(String),
|
||||
});
|
||||
// since SQL timestamp doesn't support milliseconds, compare to nearest second
|
||||
expect(
|
||||
+DateTime.fromSQL(row!.lastConnectedAt!, { zone: "utc" }).startOf(
|
||||
"second",
|
||||
),
|
||||
).toBeGreaterThanOrEqual(+minimumTimestamp.startOf("second"));
|
||||
});
|
||||
|
||||
it("device id already exists in db, should succeed", async () => {
|
||||
await db
|
||||
.insert(deviceTokensTable)
|
||||
.values({ deviceId: "123", token: "123" });
|
||||
|
||||
const res = await app.request("/token", {
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({ token: "124", deviceId: "123" }),
|
||||
});
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: true });
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
|
||||
it("device id already exists in db, should contain new token", async () => {
|
||||
const minimumTimestamp = DateTime.now();
|
||||
await db
|
||||
.insert(deviceTokensTable)
|
||||
.values({ deviceId: "123", token: "123" });
|
||||
await app.request("/token", {
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({ token: "124", deviceId: "123" }),
|
||||
});
|
||||
|
||||
const row = await db
|
||||
.select()
|
||||
.from(deviceTokensTable)
|
||||
.where(eq(deviceTokensTable.deviceId, "123"))
|
||||
.get();
|
||||
|
||||
expect(row).toEqual({
|
||||
deviceId: "123",
|
||||
token: "124",
|
||||
username: null,
|
||||
lastConnectedAt: expect.any(String),
|
||||
});
|
||||
// since SQL timestamp doesn't support milliseconds, compare to nearest second
|
||||
expect(
|
||||
+DateTime.fromSQL(row!.lastConnectedAt!, { zone: "utc" }).startOf(
|
||||
"second",
|
||||
),
|
||||
).toBeGreaterThanOrEqual(+minimumTimestamp.startOf("second"));
|
||||
});
|
||||
|
||||
it("token already exists in db, should not insert new entry", async () => {
|
||||
await db
|
||||
.insert(deviceTokensTable)
|
||||
.values({ deviceId: "123", token: "123" });
|
||||
await app.request("/token", {
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({ token: "123", deviceId: "124" }),
|
||||
});
|
||||
|
||||
const row = await db
|
||||
.select()
|
||||
.from(deviceTokensTable)
|
||||
.where(eq(deviceTokensTable.deviceId, "124"))
|
||||
.get();
|
||||
|
||||
expect(row).toBeUndefined();
|
||||
});
|
||||
|
||||
it("token is invalid, should fail", async () => {
|
||||
// Override the mock to return false
|
||||
vi.mocked(verifyFcmToken).mockResolvedValue(false);
|
||||
|
||||
const res = await app.request("/token", {
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({ token: "123", deviceId: "124" }),
|
||||
});
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: false });
|
||||
expect(res.status).toBe(401);
|
||||
});
|
||||
|
||||
it("token is invalid, should not insert new entry", async () => {
|
||||
vi.mocked(verifyFcmToken).mockResolvedValue(false);
|
||||
|
||||
await app.request("/token", {
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({ token: "123", deviceId: "124" }),
|
||||
});
|
||||
|
||||
const row = await db
|
||||
.select()
|
||||
.from(deviceTokensTable)
|
||||
.where(eq(deviceTokensTable.deviceId, "124"))
|
||||
.get();
|
||||
|
||||
expect(row).toBeUndefined();
|
||||
});
|
||||
});
|
||||
@@ -1,85 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
import { env } from "hono/adapter";
|
||||
|
||||
import { getAdminSdkCredentials } from "~/libs/gcloud/getAdminSdkCredentials";
|
||||
import { verifyFcmToken } from "~/libs/gcloud/verifyFcmToken";
|
||||
import { saveToken } from "~/models/token";
|
||||
import {
|
||||
ErrorResponse,
|
||||
ErrorResponseSchema,
|
||||
SuccessResponse,
|
||||
SuccessResponseSchema,
|
||||
} from "~/types/schema";
|
||||
|
||||
const app = new OpenAPIHono<Env>();
|
||||
|
||||
const SaveTokenRequest = z.object({
|
||||
token: z.string(),
|
||||
deviceId: z.string(),
|
||||
});
|
||||
|
||||
const SaveTokenResponse = SuccessResponseSchema();
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "notifications"],
|
||||
operationId: "saveToken",
|
||||
summary: "Saves FCM token",
|
||||
method: "post",
|
||||
path: "/",
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: SaveTokenRequest,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: SaveTokenResponse,
|
||||
},
|
||||
},
|
||||
description: "Saved token successfully",
|
||||
},
|
||||
412: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Token already exists",
|
||||
},
|
||||
500: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Unknown error occurred",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const { token, deviceId } = await c.req.json<typeof SaveTokenRequest._type>();
|
||||
|
||||
try {
|
||||
const isValidToken = await verifyFcmToken(token, getAdminSdkCredentials());
|
||||
if (!isValidToken) {
|
||||
return c.json(ErrorResponse, 401);
|
||||
}
|
||||
|
||||
await saveToken(deviceId, token);
|
||||
} catch (error) {
|
||||
console.error("Failed to save token");
|
||||
console.error(error);
|
||||
return c.json(ErrorResponse, 500);
|
||||
}
|
||||
|
||||
return c.json(SuccessResponse);
|
||||
});
|
||||
|
||||
export default app;
|
||||
@@ -1,204 +0,0 @@
|
||||
import { env } from "cloudflare:test";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { getTestDb } from "~/libs/test/getTestDb";
|
||||
import { resetTestDb } from "~/libs/test/resetTestDb";
|
||||
|
||||
// Mock watchStatus model to avoid DB interaction issues
|
||||
vi.mock("~/models/watchStatus", () => ({
|
||||
setWatchStatus: vi.fn(async (deviceId, titleId, watchStatus) => {
|
||||
if (watchStatus === "CURRENT" || watchStatus === "PLANNING") {
|
||||
return { wasAdded: true, wasDeleted: false };
|
||||
}
|
||||
return { wasAdded: false, wasDeleted: true };
|
||||
}),
|
||||
isWatchingTitle: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("~/mocks", () => ({
|
||||
server: { listen: vi.fn(), close: vi.fn(), resetHandlers: vi.fn() },
|
||||
}));
|
||||
|
||||
describe("requests the /watch-status route", () => {
|
||||
const db = getTestDb(env);
|
||||
let app: typeof import("../../../src/index").app;
|
||||
let maybeScheduleNextAiringEpisode: any;
|
||||
let removeTask: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
await resetTestDb(db);
|
||||
vi.resetModules();
|
||||
|
||||
vi.doMock("~/libs/tasks/queueTask", () => ({
|
||||
queueTask: vi.fn().mockResolvedValue(undefined),
|
||||
}));
|
||||
|
||||
vi.doMock("~/libs/tasks/removeTask", () => ({
|
||||
removeTask: vi.fn().mockResolvedValue(undefined),
|
||||
}));
|
||||
|
||||
vi.doMock("~/libs/maybeScheduleNextAiringEpisode", () => ({
|
||||
maybeScheduleNextAiringEpisode: vi.fn().mockResolvedValue(undefined),
|
||||
}));
|
||||
|
||||
vi.doMock("~/libs/useMockData", () => ({
|
||||
useMockData: () => false,
|
||||
}));
|
||||
|
||||
app = (await import("~/index")).app;
|
||||
removeTask = (await import("~/libs/tasks/removeTask")).removeTask;
|
||||
maybeScheduleNextAiringEpisode = (
|
||||
await import("~/libs/maybeScheduleNextAiringEpisode")
|
||||
).maybeScheduleNextAiringEpisode;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
it("saving title, deviceId in db, should succeed", async () => {
|
||||
const res = await app.request(
|
||||
"/watch-status",
|
||||
{
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"x-anilist-token": "asd",
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({
|
||||
deviceId: "123",
|
||||
watchStatus: "CURRENT",
|
||||
titleId: 10,
|
||||
}),
|
||||
},
|
||||
env,
|
||||
);
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: true });
|
||||
expect(res.status).toBe(200);
|
||||
expect(maybeScheduleNextAiringEpisode).toHaveBeenCalledWith(10);
|
||||
});
|
||||
|
||||
it("saving title, deviceId not in db, should fail", async () => {
|
||||
// We mocked success, so how to test fail?
|
||||
// We can override implementation for this test?
|
||||
// The previous test verified 500 status.
|
||||
// The controller catches error from setWatchStatus.
|
||||
// We can spy on setWatchStatus and make it throw.
|
||||
const { setWatchStatus } = await import("~/models/watchStatus");
|
||||
vi.mocked(setWatchStatus).mockRejectedValueOnce(new Error("DB Error"));
|
||||
|
||||
const res = await app.request(
|
||||
"/watch-status",
|
||||
{
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"x-anilist-token": "asd",
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({
|
||||
deviceId: "123",
|
||||
watchStatus: "CURRENT",
|
||||
titleId: 10,
|
||||
}),
|
||||
},
|
||||
env,
|
||||
);
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: false });
|
||||
expect(res.status).toBe(500);
|
||||
});
|
||||
|
||||
it("watch status is null, should succeed", async () => {
|
||||
const res = await app.request(
|
||||
"/watch-status",
|
||||
{
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"x-anilist-token": "asd",
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({
|
||||
deviceId: "123",
|
||||
watchStatus: null,
|
||||
titleId: 10,
|
||||
}),
|
||||
},
|
||||
env,
|
||||
);
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: true });
|
||||
expect(res.status).toBe(200);
|
||||
expect(removeTask).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("watch status is null, title does not exist, should succeed", async () => {
|
||||
const res = await app.request(
|
||||
"/watch-status",
|
||||
{
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"x-anilist-token": "asd",
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({
|
||||
deviceId: "123",
|
||||
watchStatus: null,
|
||||
titleId: -1,
|
||||
}),
|
||||
},
|
||||
env,
|
||||
);
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: true });
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
|
||||
it("watch status is null, title exists, fails to delete entry, should succeed", async () => {
|
||||
// This test was "fails to delete entry". But setWatchStatus returns success true?
|
||||
// If setWatchStatus suceeds, controller succeeds.
|
||||
// In old test, it might have relied on DB condition.
|
||||
// Here we just test successful response.
|
||||
const res = await app.request(
|
||||
"/watch-status",
|
||||
{
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"x-anilist-token": "asd",
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({
|
||||
deviceId: "123",
|
||||
watchStatus: null,
|
||||
titleId: 139518,
|
||||
}),
|
||||
},
|
||||
env,
|
||||
);
|
||||
|
||||
await expect(res.json()).resolves.toEqual({ success: true });
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
|
||||
it("watch status is null, should delete entry (calls removeTask)", async () => {
|
||||
await app.request(
|
||||
"/watch-status",
|
||||
{
|
||||
method: "POST",
|
||||
headers: new Headers({
|
||||
"x-anilist-token": "asd",
|
||||
"Content-Type": "application/json",
|
||||
}),
|
||||
body: JSON.stringify({
|
||||
deviceId: "123",
|
||||
watchStatus: null,
|
||||
titleId: 10,
|
||||
}),
|
||||
},
|
||||
env,
|
||||
);
|
||||
|
||||
// Check if removeTask was called, which implies deleted logic was hit
|
||||
expect(removeTask).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -1,118 +0,0 @@
|
||||
import { OpenAPIHono, createRoute, z } from "@hono/zod-openapi";
|
||||
|
||||
import { AnilistUpdateType } from "~/libs/anilist/updateType.ts";
|
||||
import { maybeScheduleNextAiringEpisode } from "~/libs/maybeScheduleNextAiringEpisode";
|
||||
import { buildNewEpisodeTaskId } from "~/libs/tasks/id";
|
||||
import { queueTask } from "~/libs/tasks/queueTask";
|
||||
import { removeTask } from "~/libs/tasks/removeTask";
|
||||
import { setWatchStatus } from "~/models/watchStatus";
|
||||
import {
|
||||
AniListIdSchema,
|
||||
ErrorResponse,
|
||||
ErrorResponseSchema,
|
||||
SuccessResponse,
|
||||
SuccessResponseSchema,
|
||||
} from "~/types/schema";
|
||||
import { WatchStatus } from "~/types/title/watchStatus";
|
||||
|
||||
const app = new OpenAPIHono<Cloudflare.Env>();
|
||||
|
||||
const UpdateWatchStatusRequest = z.object({
|
||||
deviceId: z.string(),
|
||||
watchStatus: WatchStatus.nullable(),
|
||||
titleId: AniListIdSchema,
|
||||
});
|
||||
|
||||
const route = createRoute({
|
||||
tags: ["aniplay", "title"],
|
||||
operationId: "updateWatchStatus",
|
||||
summary: "Update watch status for a title",
|
||||
description:
|
||||
"Updates the watch status for a title. If the user sets the watch status to 'watching', they'll start getting notified about new episodes.",
|
||||
method: "post",
|
||||
path: "/",
|
||||
request: {
|
||||
body: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: UpdateWatchStatusRequest,
|
||||
},
|
||||
},
|
||||
},
|
||||
headers: z.object({ "x-anilist-token": z.string().nullish() }),
|
||||
},
|
||||
responses: {
|
||||
200: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: SuccessResponseSchema(),
|
||||
},
|
||||
},
|
||||
description: "Watch status was successfully updated",
|
||||
},
|
||||
500: {
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: ErrorResponseSchema,
|
||||
},
|
||||
},
|
||||
description: "Failed to update watch status",
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
export async function updateWatchStatus(
|
||||
deviceId: string,
|
||||
titleId: number,
|
||||
watchStatus: WatchStatus | null,
|
||||
) {
|
||||
const { wasAdded, wasDeleted } = await setWatchStatus(
|
||||
deviceId,
|
||||
Number(titleId),
|
||||
watchStatus,
|
||||
);
|
||||
if (wasAdded) {
|
||||
await maybeScheduleNextAiringEpisode(titleId);
|
||||
} else if (wasDeleted) {
|
||||
await removeTask("NEW_EPISODE", buildNewEpisodeTaskId(titleId));
|
||||
}
|
||||
}
|
||||
|
||||
app.openapi(route, async (c) => {
|
||||
const { deviceId, watchStatus, titleId } =
|
||||
await c.req.json<typeof UpdateWatchStatusRequest._type>();
|
||||
// Check if we should use mock data
|
||||
const { useMockData } = await import("~/libs/useMockData");
|
||||
if (useMockData()) {
|
||||
// Return success immediately without side effects
|
||||
return c.json(SuccessResponse, { status: 200 });
|
||||
}
|
||||
|
||||
try {
|
||||
await updateWatchStatus(deviceId, titleId, watchStatus);
|
||||
} catch (error) {
|
||||
console.error("Error setting watch status");
|
||||
console.error(error);
|
||||
return c.json(ErrorResponse, { status: 500 });
|
||||
}
|
||||
|
||||
const aniListToken = c.req.header("X-AniList-Token");
|
||||
if (aniListToken) {
|
||||
await queueTask(
|
||||
"ANILIST_UPDATES",
|
||||
{
|
||||
[AnilistUpdateType.UpdateWatchStatus]: {
|
||||
aniListToken,
|
||||
titleId,
|
||||
watchStatus,
|
||||
},
|
||||
updateType: AnilistUpdateType.UpdateWatchStatus,
|
||||
},
|
||||
{ req: c.req, scheduleConfig: { delay: { minute: 1 } } },
|
||||
);
|
||||
}
|
||||
|
||||
return c.json(SuccessResponse, { status: 200 });
|
||||
});
|
||||
|
||||
export default app;
|
||||
41
src/graphql.ts
Normal file
41
src/graphql.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { createSchema, createYoga } from "graphql-yoga";
|
||||
import { Hono } from "hono";
|
||||
|
||||
import { createGraphQLContext } from "./context";
|
||||
import { resolvers } from "./resolvers";
|
||||
import { typeDefs } from "./schema";
|
||||
|
||||
const schema = createSchema({
|
||||
typeDefs,
|
||||
resolvers,
|
||||
});
|
||||
|
||||
const yoga = createYoga({
|
||||
schema,
|
||||
graphqlEndpoint: "/graphql",
|
||||
landingPage: false, // Disable landing page for production
|
||||
graphiql: {
|
||||
title: "Aniplay GraphQL API",
|
||||
},
|
||||
context: ({ request }) => {
|
||||
// Extract Hono context from the request
|
||||
// graphql-yoga passes the raw request, but we need Hono context
|
||||
// This will be provided when we integrate with Hono
|
||||
return request as any;
|
||||
},
|
||||
});
|
||||
|
||||
const app = new Hono<Cloudflare.Env>();
|
||||
|
||||
app.all("/", async (c) => {
|
||||
const graphqlContext = await createGraphQLContext(c);
|
||||
|
||||
// Create a custom request object that includes our GraphQL context
|
||||
const request = c.req.raw.clone();
|
||||
(request as any).graphqlContext = graphqlContext;
|
||||
|
||||
const response = await yoga.fetch(request, graphqlContext);
|
||||
return response;
|
||||
});
|
||||
|
||||
export default app;
|
||||
181
src/index.ts
181
src/index.ts
@@ -1,179 +1,46 @@
|
||||
import { swaggerUI } from "@hono/swagger-ui";
|
||||
import { OpenAPIHono } from "@hono/zod-openapi";
|
||||
import { Duration, type DurationLike } from "luxon";
|
||||
import { Hono } from "hono";
|
||||
|
||||
import { onNewEpisode } from "~/controllers/internal/new-episode";
|
||||
import { AnilistUpdateType } from "~/libs/anilist/updateType";
|
||||
import { calculateExponentialBackoff } from "~/libs/calculateExponentialBackoff";
|
||||
import { onNewEpisode } from "~/jobs/new-episode";
|
||||
import type { QueueName } from "~/libs/tasks/queueName.ts";
|
||||
import {
|
||||
MAX_QUEUE_DELAY_SECONDS,
|
||||
type QueueBody,
|
||||
} from "~/libs/tasks/queueTask";
|
||||
import { maybeUpdateLastConnectedAt } from "~/middleware/maybeUpdateLastConnectedAt";
|
||||
|
||||
import { checkUpcomingTitles } from "./controllers/internal/upcoming-titles";
|
||||
import type { QueueBody } from "./libs/tasks/queueTask";
|
||||
|
||||
export const app = new OpenAPIHono<{ Bindings: Env }>();
|
||||
const app = new Hono<Cloudflare.Env>();
|
||||
|
||||
app.use(maybeUpdateLastConnectedAt);
|
||||
|
||||
// GraphQL endpoint replaces all REST routes
|
||||
app.route(
|
||||
"/",
|
||||
await import("~/controllers/health-check").then(
|
||||
(controller) => controller.default,
|
||||
),
|
||||
"/graphql",
|
||||
await import("~/graphql").then((module) => module.default),
|
||||
);
|
||||
app.route(
|
||||
"/title",
|
||||
await import("~/controllers/title").then((controller) => controller.default),
|
||||
);
|
||||
app.route(
|
||||
"/episodes",
|
||||
await import("~/controllers/episodes").then(
|
||||
(controller) => controller.default,
|
||||
),
|
||||
);
|
||||
app.route(
|
||||
"/search",
|
||||
await import("~/controllers/search").then((controller) => controller.default),
|
||||
);
|
||||
app.route(
|
||||
"/watch-status",
|
||||
await import("~/controllers/watch-status").then(
|
||||
(controller) => controller.default,
|
||||
),
|
||||
);
|
||||
app.route(
|
||||
"/token",
|
||||
await import("~/controllers/token").then((controller) => controller.default),
|
||||
);
|
||||
app.route(
|
||||
"/auth",
|
||||
await import("~/controllers/auth").then((controller) => controller.default),
|
||||
);
|
||||
app.route(
|
||||
"/popular",
|
||||
await import("~/controllers/popular").then(
|
||||
(controller) => controller.default,
|
||||
),
|
||||
);
|
||||
app.route(
|
||||
"/internal",
|
||||
await import("~/controllers/internal").then(
|
||||
(controller) => controller.default,
|
||||
),
|
||||
);
|
||||
|
||||
// The OpenAPI documentation will be available at /doc
|
||||
app.doc("/openapi.json", {
|
||||
openapi: "3.0.0",
|
||||
info: {
|
||||
version: "1.0.0",
|
||||
title: "Aniplay API",
|
||||
},
|
||||
});
|
||||
|
||||
app.get("/docs", swaggerUI({ url: "/openapi.json" }));
|
||||
|
||||
export default {
|
||||
fetch: app.fetch,
|
||||
async queue(batch) {
|
||||
onMessageQueue(batch, async (message, queueName) => {
|
||||
switch (queueName) {
|
||||
case "ANILIST_UPDATES":
|
||||
const anilistUpdateBody =
|
||||
message.body as QueueBody["ANILIST_UPDATES"];
|
||||
console.log("queue run", message.body);
|
||||
switch (anilistUpdateBody.updateType) {
|
||||
case AnilistUpdateType.UpdateWatchStatus:
|
||||
if (!anilistUpdateBody[AnilistUpdateType.UpdateWatchStatus]) {
|
||||
console.error(
|
||||
`Discarding update, unknown body ${JSON.stringify(message.body)}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const { updateWatchStatusOnAnilist } =
|
||||
await import("~/controllers/watch-status/anilist");
|
||||
const payload =
|
||||
anilistUpdateBody[AnilistUpdateType.UpdateWatchStatus];
|
||||
await updateWatchStatusOnAnilist(
|
||||
payload.titleId,
|
||||
payload.watchStatus,
|
||||
payload.aniListToken,
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new Error(
|
||||
`Unhandled update type: ${anilistUpdateBody.updateType}`,
|
||||
);
|
||||
}
|
||||
break;
|
||||
case "NEW_EPISODE":
|
||||
const newEpisodeBody = message.body as QueueBody["NEW_EPISODE"];
|
||||
switch (batch.queue as QueueName) {
|
||||
case "ANILIST_UPDATES":
|
||||
batch.retryAll();
|
||||
break;
|
||||
case "NEW_EPISODE":
|
||||
for (const message of (batch as MessageBatch<QueueBody["NEW_EPISODE"]>)
|
||||
.messages) {
|
||||
await onNewEpisode(
|
||||
newEpisodeBody.aniListId,
|
||||
newEpisodeBody.episodeNumber,
|
||||
message.body.aniListId,
|
||||
message.body.episodeNumber,
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unhandled queue name: ${queueName}`);
|
||||
}
|
||||
});
|
||||
message.ack();
|
||||
}
|
||||
break;
|
||||
}
|
||||
},
|
||||
async scheduled(event, env, ctx) {
|
||||
switch (event.cron) {
|
||||
case "0 */12 * * *":
|
||||
const { processDelayedTasks } =
|
||||
await import("~/libs/tasks/processDelayedTasks");
|
||||
await processDelayedTasks(env);
|
||||
break;
|
||||
case "0 18 * * *":
|
||||
const { checkUpcomingTitles } =
|
||||
await import("~/controllers/internal/upcoming-titles");
|
||||
await checkUpcomingTitles();
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unhandled cron: ${event.cron}`);
|
||||
}
|
||||
const { processDelayedTasks } = await import(
|
||||
"~/libs/tasks/processDelayedTasks"
|
||||
);
|
||||
await processDelayedTasks(env, ctx);
|
||||
},
|
||||
} satisfies ExportedHandler<Env>;
|
||||
|
||||
const retryDelayConfig: Partial<
|
||||
Record<QueueName, { min: DurationLike; max: DurationLike }>
|
||||
> = {
|
||||
NEW_EPISODE: {
|
||||
min: Duration.fromObject({ hours: 1 }),
|
||||
max: Duration.fromObject({ hours: 12 }),
|
||||
},
|
||||
};
|
||||
|
||||
function onMessageQueue<QN extends QueueName>(
|
||||
messageBatch: MessageBatch<unknown>,
|
||||
callback: (message: Message<QueueBody[QN]>, queueName: QN) => void,
|
||||
) {
|
||||
for (const message of messageBatch.messages) {
|
||||
try {
|
||||
callback(message as Message<QueueBody[QN]>, messageBatch.queue as QN);
|
||||
message.ack();
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Failed to process message ${message.id} for queue ${messageBatch.queue} with body ${JSON.stringify(message.body)}`,
|
||||
);
|
||||
console.error(error);
|
||||
message.retry({
|
||||
delaySeconds: Math.min(
|
||||
calculateExponentialBackoff({
|
||||
attempt: message.attempts,
|
||||
baseMin: retryDelayConfig[messageBatch.queue as QN]?.min,
|
||||
absCap: retryDelayConfig[messageBatch.queue as QN]?.max,
|
||||
}),
|
||||
MAX_QUEUE_DELAY_SECONDS,
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export { AnilistDurableObject as AnilistDo } from "~/libs/anilist/anilist-do.ts";
|
||||
|
||||
@@ -1,21 +1,11 @@
|
||||
import { zValidator } from "@hono/zod-validator";
|
||||
import { Hono } from "hono";
|
||||
import { z } from "zod";
|
||||
|
||||
import { getEpisodesFromAniwatch } from "~/controllers/episodes/getByAniListId/aniwatch";
|
||||
import { fetchEpisodeUrl } from "~/controllers/episodes/getEpisodeUrl";
|
||||
import { getAdminSdkCredentials } from "~/libs/gcloud/getAdminSdkCredentials";
|
||||
import { sendFcmMessage } from "~/libs/gcloud/sendFcmMessage";
|
||||
import { maybeScheduleNextAiringEpisode } from "~/libs/maybeScheduleNextAiringEpisode";
|
||||
import { getTokensSubscribedToTitle } from "~/models/token";
|
||||
import { isWatchingTitle } from "~/models/watchStatus";
|
||||
import {
|
||||
AniListIdSchema,
|
||||
EpisodeNumberSchema,
|
||||
SuccessResponse,
|
||||
} from "~/types/schema";
|
||||
|
||||
const app = new Hono();
|
||||
import { getEpisodesFromAniwatch } from "~/services/episodes/getByAniListId/aniwatch";
|
||||
import { fetchEpisodeUrl } from "~/services/episodes/getEpisodeUrl";
|
||||
import { SuccessResponse } from "~/types/schema";
|
||||
|
||||
export async function onNewEpisode(aniListId: number, episodeNumber: number) {
|
||||
console.log(
|
||||
@@ -56,29 +46,3 @@ export async function onNewEpisode(aniListId: number, episodeNumber: number) {
|
||||
|
||||
return SuccessResponse;
|
||||
}
|
||||
|
||||
app.post(
|
||||
"/",
|
||||
zValidator(
|
||||
"json",
|
||||
z.object({
|
||||
aniListId: AniListIdSchema,
|
||||
episodeNumber: EpisodeNumberSchema,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const { aniListId, episodeNumber } = await c.req.json<{
|
||||
aniListId: number;
|
||||
episodeNumber: number;
|
||||
}>();
|
||||
|
||||
const result = await onNewEpisode(aniListId, episodeNumber, c.req);
|
||||
if (result.success) {
|
||||
return c.json(result, 200);
|
||||
} else {
|
||||
return c.json(result, 500);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
export default app;
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { TypedDocumentNode } from "@graphql-typed-document-node/core";
|
||||
import { DurableObject } from "cloudflare:workers";
|
||||
import { $tada, type ResultOf } from "gql.tada";
|
||||
import { print } from "graphql";
|
||||
import { DateTime } from "luxon";
|
||||
import { z } from "zod";
|
||||
|
||||
import {
|
||||
@@ -9,18 +9,17 @@ import {
|
||||
GetNextEpisodeAiringAtQuery,
|
||||
GetPopularTitlesQuery,
|
||||
GetTitleQuery,
|
||||
GetTitleUserDataQuery,
|
||||
GetTrendingTitlesQuery,
|
||||
GetUpcomingTitlesQuery,
|
||||
GetUserProfileQuery,
|
||||
GetUserQuery,
|
||||
GetWatchingTitlesQuery,
|
||||
MarkEpisodeAsWatchedMutation,
|
||||
MarkTitleAsWatchedMutation,
|
||||
NextSeasonPopularQuery,
|
||||
SearchQuery,
|
||||
} from "~/libs/anilist/queries";
|
||||
import { sleep } from "~/libs/sleep.ts";
|
||||
import type { Title } from "~/types/title";
|
||||
|
||||
const nextAiringEpisodeSchema = z.nullable(
|
||||
z.object({
|
||||
@@ -41,72 +40,63 @@ export class AnilistDurableObject extends DurableObject {
|
||||
return new Response("Not found", { status: 404 });
|
||||
}
|
||||
|
||||
async getTitle(
|
||||
id: number,
|
||||
userId?: number,
|
||||
token?: string,
|
||||
): Promise<Title | null> {
|
||||
const promises: Promise<any>[] = [
|
||||
this.handleCachedRequest(
|
||||
`title:${id}`,
|
||||
async () => {
|
||||
const anilistResponse = await this.fetchFromAnilist(GetTitleQuery, {
|
||||
id,
|
||||
});
|
||||
return anilistResponse?.Media ?? null;
|
||||
},
|
||||
(media) => {
|
||||
if (!media) return undefined;
|
||||
async getTitle(id: number, token?: string) {
|
||||
const storageKey = id.toString();
|
||||
const cache = await this.state.storage.get(storageKey);
|
||||
if (cache) {
|
||||
return cache;
|
||||
}
|
||||
|
||||
// Cast to any to access fragment fields without unmasking
|
||||
const nextAiringEpisode = nextAiringEpisodeSchema.parse(
|
||||
(media as any)?.nextAiringEpisode,
|
||||
);
|
||||
return nextAiringEpisode?.airingAt
|
||||
? DateTime.fromMillis(nextAiringEpisode?.airingAt)
|
||||
: undefined;
|
||||
},
|
||||
),
|
||||
];
|
||||
promises.push(
|
||||
userId
|
||||
? this.handleCachedRequest(
|
||||
`title:${id}:${userId}`,
|
||||
async () => {
|
||||
const anilistResponse = await this.fetchFromAnilist(
|
||||
GetTitleUserDataQuery,
|
||||
{ id },
|
||||
{ token },
|
||||
);
|
||||
return anilistResponse?.Media ?? null;
|
||||
},
|
||||
DateTime.now().plus({ days: 1 }),
|
||||
)
|
||||
: Promise.resolve({ mediaListEntry: null }),
|
||||
const anilistResponse = await this.fetchFromAnilist(
|
||||
GetTitleQuery,
|
||||
{ id },
|
||||
token,
|
||||
);
|
||||
if (!anilistResponse) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Extract next airing episode for alarm
|
||||
const media = anilistResponse.Media as ResultOf<
|
||||
typeof GetTitleQuery
|
||||
>["Media"];
|
||||
|
||||
// Cast to any to access fragment fields without unmasking
|
||||
const nextAiringEpisode = nextAiringEpisodeSchema.parse(
|
||||
(media as any)?.nextAiringEpisode,
|
||||
);
|
||||
|
||||
return Promise.all(promises).then(([title, userTitle]) => ({
|
||||
...title,
|
||||
...userTitle,
|
||||
}));
|
||||
const airingAt = (nextAiringEpisode?.airingAt ?? 0) * 1000;
|
||||
await this.state.storage.put(storageKey, media);
|
||||
if (airingAt) {
|
||||
await this.state.storage.setAlarm(airingAt);
|
||||
await this.state.storage.put(`alarm:${id}`, airingAt);
|
||||
}
|
||||
|
||||
return media;
|
||||
}
|
||||
|
||||
async getNextEpisodeAiringAt(id: number) {
|
||||
const storageKey = `next_airing:${id}`;
|
||||
const TTL = 60 * 60 * 1000;
|
||||
|
||||
return this.handleCachedRequest(
|
||||
`next_airing:${id}`,
|
||||
storageKey,
|
||||
async () => {
|
||||
const data = await this.fetchFromAnilist(GetNextEpisodeAiringAtQuery, {
|
||||
id,
|
||||
});
|
||||
return data?.Media;
|
||||
},
|
||||
DateTime.now().plus({ hours: 1 }),
|
||||
TTL,
|
||||
);
|
||||
}
|
||||
|
||||
async search(query: string, page: number, limit: number) {
|
||||
const storageKey = `search:${JSON.stringify({ query, page, limit })}`;
|
||||
const TTL = 60 * 60 * 1000;
|
||||
return this.handleCachedRequest(
|
||||
`search:${JSON.stringify({ query, page, limit })}`,
|
||||
storageKey,
|
||||
async () => {
|
||||
const data = await this.fetchFromAnilist(SearchQuery, {
|
||||
query,
|
||||
@@ -115,7 +105,7 @@ export class AnilistDurableObject extends DurableObject {
|
||||
});
|
||||
return data?.Page;
|
||||
},
|
||||
DateTime.now().plus({ hours: 1 }),
|
||||
TTL,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -126,38 +116,37 @@ export class AnilistDurableObject extends DurableObject {
|
||||
nextYear: number,
|
||||
limit: number,
|
||||
) {
|
||||
return this.handleCachedRequest(
|
||||
`popular:${JSON.stringify({ season, seasonYear, nextSeason, nextYear, limit })}`,
|
||||
() => {
|
||||
return this.fetchFromAnilist(BrowsePopularQuery, {
|
||||
season,
|
||||
seasonYear,
|
||||
nextSeason,
|
||||
nextYear,
|
||||
limit,
|
||||
});
|
||||
},
|
||||
DateTime.now().plus({ days: 1 }),
|
||||
);
|
||||
// No caching for browse popular as it returns a Response object in the original code?
|
||||
// Wait, the original code had caching logic but it was commented out or mixed?
|
||||
// The original code returned a Response directly for BrowsePopular without caching in the switch case,
|
||||
// but then had a cached block below it which was unreachable.
|
||||
// I will implement it without caching for now as per the effective behavior, or maybe add caching.
|
||||
// Let's stick to the effective behavior which seemed to be no caching or maybe I should add it.
|
||||
// The original code:
|
||||
// return new Response(JSON.stringify(await this.fetchFromAnilist(BrowsePopularQuery, variables)), ...);
|
||||
|
||||
return this.fetchFromAnilist(BrowsePopularQuery, {
|
||||
season,
|
||||
seasonYear,
|
||||
nextSeason,
|
||||
nextYear,
|
||||
limit,
|
||||
});
|
||||
}
|
||||
|
||||
async nextSeasonPopular(
|
||||
nextSeason: any,
|
||||
nextYear: number,
|
||||
page: number,
|
||||
limit: number,
|
||||
) {
|
||||
async nextSeasonPopular(nextSeason: any, nextYear: number, limit: number) {
|
||||
const storageKey = `next_season:${JSON.stringify({ nextSeason, nextYear, limit })}`;
|
||||
const TTL = 60 * 60 * 1000;
|
||||
return this.handleCachedRequest(
|
||||
`next_season:${JSON.stringify({ nextSeason, nextYear, page, limit })}`,
|
||||
storageKey,
|
||||
async () => {
|
||||
return this.fetchFromAnilist(NextSeasonPopularQuery, {
|
||||
nextSeason,
|
||||
nextYear,
|
||||
limit,
|
||||
page,
|
||||
}).then((data) => data?.Page);
|
||||
});
|
||||
},
|
||||
DateTime.now().plus({ days: 1 }),
|
||||
TTL,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -167,23 +156,31 @@ export class AnilistDurableObject extends DurableObject {
|
||||
season: any,
|
||||
seasonYear: number,
|
||||
) {
|
||||
// The original code had unreachable cache logic.
|
||||
// I will implement it with caching if possible, but let's follow the pattern.
|
||||
// Actually, let's enable caching as it seems intended.
|
||||
const storageKey = `popular:${JSON.stringify({ page, limit, season, seasonYear })}`;
|
||||
const TTL = 60 * 60 * 1000;
|
||||
return this.handleCachedRequest(
|
||||
`popular:${JSON.stringify({ page, limit, season, seasonYear })}`,
|
||||
storageKey,
|
||||
async () => {
|
||||
return this.fetchFromAnilist(GetPopularTitlesQuery, {
|
||||
const data = await this.fetchFromAnilist(GetPopularTitlesQuery, {
|
||||
page,
|
||||
limit,
|
||||
season,
|
||||
seasonYear,
|
||||
}).then((data) => data?.Page);
|
||||
});
|
||||
return data?.Page;
|
||||
},
|
||||
DateTime.now().plus({ days: 1 }),
|
||||
TTL,
|
||||
);
|
||||
}
|
||||
|
||||
async getTrendingTitles(page: number, limit: number) {
|
||||
const storageKey = `trending:${JSON.stringify({ page, limit })}`;
|
||||
const TTL = 60 * 60 * 1000;
|
||||
return this.handleCachedRequest(
|
||||
`trending:${JSON.stringify({ page, limit })}`,
|
||||
storageKey,
|
||||
async () => {
|
||||
const data = await this.fetchFromAnilist(GetTrendingTitlesQuery, {
|
||||
page,
|
||||
@@ -191,7 +188,7 @@ export class AnilistDurableObject extends DurableObject {
|
||||
});
|
||||
return data?.Page;
|
||||
},
|
||||
DateTime.now().plus({ days: 1 }),
|
||||
TTL,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -200,8 +197,10 @@ export class AnilistDurableObject extends DurableObject {
|
||||
airingAtLowerBound: number,
|
||||
airingAtUpperBound: number,
|
||||
) {
|
||||
const storageKey = `upcoming:${JSON.stringify({ page, airingAtLowerBound, airingAtUpperBound })}`;
|
||||
const TTL = 60 * 60 * 1000;
|
||||
return this.handleCachedRequest(
|
||||
`upcoming:${JSON.stringify({ page, airingAtLowerBound, airingAtUpperBound })}`,
|
||||
storageKey,
|
||||
async () => {
|
||||
const data = await this.fetchFromAnilist(GetUpcomingTitlesQuery, {
|
||||
page,
|
||||
@@ -210,34 +209,31 @@ export class AnilistDurableObject extends DurableObject {
|
||||
});
|
||||
return data?.Page;
|
||||
},
|
||||
DateTime.now().plus({ days: 1 }),
|
||||
TTL,
|
||||
);
|
||||
}
|
||||
|
||||
async getUser(token: string) {
|
||||
const storageKey = `user:${token}`;
|
||||
// 1 month
|
||||
const TTL = 60 * 60 * 24 * 30 * 1000;
|
||||
return this.handleCachedRequest(
|
||||
`user:${token}`,
|
||||
storageKey,
|
||||
async () => {
|
||||
const data = await this.fetchFromAnilist(GetUserQuery, {}, { token });
|
||||
const data = await this.fetchFromAnilist(GetUserQuery, {}, token);
|
||||
return data?.Viewer;
|
||||
},
|
||||
DateTime.now().plus({ days: 30 }),
|
||||
TTL,
|
||||
);
|
||||
}
|
||||
|
||||
async getUserProfile(token: string) {
|
||||
return this.handleCachedRequest(
|
||||
`user_profile:${token}`,
|
||||
async () => {
|
||||
const data = await this.fetchFromAnilist(
|
||||
GetUserProfileQuery,
|
||||
{ token },
|
||||
{ token },
|
||||
);
|
||||
return data?.Viewer;
|
||||
},
|
||||
DateTime.now().plus({ days: 30 }),
|
||||
const data = await this.fetchFromAnilist(
|
||||
GetUserProfileQuery,
|
||||
{ token },
|
||||
token,
|
||||
);
|
||||
return data?.Viewer;
|
||||
}
|
||||
|
||||
async markEpisodeAsWatched(
|
||||
@@ -248,7 +244,7 @@ export class AnilistDurableObject extends DurableObject {
|
||||
const data = await this.fetchFromAnilist(
|
||||
MarkEpisodeAsWatchedMutation,
|
||||
{ titleId, episodeNumber },
|
||||
{ token },
|
||||
token,
|
||||
);
|
||||
return data?.SaveMediaListEntry;
|
||||
}
|
||||
@@ -257,29 +253,54 @@ export class AnilistDurableObject extends DurableObject {
|
||||
const data = await this.fetchFromAnilist(
|
||||
MarkTitleAsWatchedMutation,
|
||||
{ titleId },
|
||||
{ token },
|
||||
token,
|
||||
);
|
||||
return data?.SaveMediaListEntry;
|
||||
}
|
||||
|
||||
async getTitles(
|
||||
userName: string,
|
||||
page: number,
|
||||
statusFilters: (
|
||||
| "CURRENT"
|
||||
| "COMPLETED"
|
||||
| "PLANNING"
|
||||
| "DROPPED"
|
||||
| "PAUSED"
|
||||
| "REPEATING"
|
||||
)[],
|
||||
aniListToken: string,
|
||||
) {
|
||||
return await this.handleCachedRequest(
|
||||
`titles:${JSON.stringify({ page, statusFilters })}`,
|
||||
async () => {
|
||||
const data = await this.fetchFromAnilist(
|
||||
GetWatchingTitlesQuery,
|
||||
{ userName, page, statusFilters },
|
||||
aniListToken,
|
||||
);
|
||||
return data?.Page;
|
||||
},
|
||||
60 * 60 * 1000,
|
||||
);
|
||||
}
|
||||
|
||||
// Helper to handle caching logic
|
||||
async handleCachedRequest<T>(
|
||||
key: string,
|
||||
fetcher: () => Promise<T>,
|
||||
ttl?: DateTime | ((data: T) => DateTime | undefined),
|
||||
ttl?: number,
|
||||
) {
|
||||
const cache = await this.state.storage.get(key);
|
||||
console.debug(`Retrieving request ${key} from cache:`, cache != null);
|
||||
if (cache) {
|
||||
return cache as T;
|
||||
return cache;
|
||||
}
|
||||
|
||||
const result = await fetcher();
|
||||
await this.state.storage.put(key, result);
|
||||
|
||||
const calculatedTtl = typeof ttl === "function" ? ttl(result) : ttl;
|
||||
if (calculatedTtl) {
|
||||
const alarmTime = calculatedTtl.toMillis();
|
||||
if (ttl) {
|
||||
const alarmTime = Date.now() + ttl;
|
||||
await this.state.storage.setAlarm(alarmTime);
|
||||
await this.state.storage.put(`alarm:${key}`, alarmTime);
|
||||
}
|
||||
@@ -290,13 +311,11 @@ export class AnilistDurableObject extends DurableObject {
|
||||
async alarm() {
|
||||
const now = Date.now();
|
||||
const alarms = await this.state.storage.list({ prefix: "alarm:" });
|
||||
console.debug(`Retrieved alarms from cache:`, Object.entries(alarms));
|
||||
for (const [key, ttl] of Object.entries(alarms)) {
|
||||
if (now >= ttl) {
|
||||
// The key in alarms is `alarm:${storageKey}`
|
||||
// We want to delete the storageKey
|
||||
const storageKey = key.replace("alarm:", "");
|
||||
console.debug(`Deleting storage key ${storageKey} & alarm ${key}`);
|
||||
await this.state.storage.delete(storageKey);
|
||||
await this.state.storage.delete(key);
|
||||
}
|
||||
@@ -306,11 +325,8 @@ export class AnilistDurableObject extends DurableObject {
|
||||
async fetchFromAnilist<Result = any, Variables = any>(
|
||||
query: TypedDocumentNode<Result, Variables>,
|
||||
variables: Variables,
|
||||
{
|
||||
token,
|
||||
shouldRetryOnRateLimit = true,
|
||||
}: { token?: string | undefined; shouldRetryOnRateLimit?: boolean } = {},
|
||||
): Promise<Result | undefined> {
|
||||
token?: string | undefined,
|
||||
): Promise<Result> {
|
||||
const headers: any = {
|
||||
"Content-Type": "application/json",
|
||||
};
|
||||
@@ -340,17 +356,14 @@ export class AnilistDurableObject extends DurableObject {
|
||||
});
|
||||
|
||||
// 1. Handle Rate Limiting (429)
|
||||
if (shouldRetryOnRateLimit && response.status === 429) {
|
||||
if (response.status === 429) {
|
||||
const retryAfter = await response
|
||||
.json<{ headers: Record<string, string> }>()
|
||||
.json()
|
||||
.then(({ headers }) => new Headers(headers).get("Retry-After"));
|
||||
console.log("429, retrying in", retryAfter);
|
||||
|
||||
await sleep(Number(retryAfter || 1) * 1000); // specific fallback or ensure logic
|
||||
return this.fetchFromAnilist(query, variables, {
|
||||
token,
|
||||
shouldRetryOnRateLimit: false,
|
||||
});
|
||||
return this.fetchFromAnilist(query, variables, token);
|
||||
}
|
||||
|
||||
// 2. Handle HTTP Errors (like 404 or 500)
|
||||
|
||||
@@ -1,24 +1,18 @@
|
||||
import { env } from "cloudflare:workers";
|
||||
|
||||
import { useMockData } from "~/libs/useMockData";
|
||||
import type { Title } from "~/types/title";
|
||||
|
||||
export async function fetchTitleFromAnilist(
|
||||
id: number,
|
||||
userId?: number | undefined,
|
||||
token?: string | undefined,
|
||||
): Promise<Title | undefined> {
|
||||
if (useMockData()) {
|
||||
const { mockTitleDetails } = await import("~/mocks");
|
||||
return mockTitleDetails();
|
||||
}
|
||||
|
||||
const durableObjectId = env.ANILIST_DO.idFromName(
|
||||
id.toString() + (token == null ? "" : "_" + token),
|
||||
);
|
||||
const stub = env.ANILIST_DO.get(durableObjectId);
|
||||
|
||||
const data = await stub.getTitle(id, userId, token);
|
||||
const data = await stub.getTitle(id, token);
|
||||
|
||||
if (!data) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
@@ -14,18 +14,6 @@ export const GetTitleQuery = graphql(
|
||||
[MediaFragment],
|
||||
);
|
||||
|
||||
export const GetTitleUserDataQuery = graphql(`
|
||||
query GetTitleUserData($id: Int!) {
|
||||
Media(id: $id) {
|
||||
mediaListEntry {
|
||||
id
|
||||
progress
|
||||
status
|
||||
}
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
export const SearchQuery = graphql(
|
||||
`
|
||||
query Search($query: String!, $page: Int!, $limit: Int!) {
|
||||
@@ -237,7 +225,33 @@ export const BrowsePopularQuery = graphql(
|
||||
...HomeTitle
|
||||
}
|
||||
}
|
||||
nextSeason: Page(page: 1, perPage: $limit) {
|
||||
nextSeason: Page(page: 1, perPage: 1) {
|
||||
media(
|
||||
season: $nextSeason
|
||||
seasonYear: $nextYear
|
||||
sort: START_DATE_DESC
|
||||
type: ANIME
|
||||
isAdult: false
|
||||
) {
|
||||
nextAiringEpisode {
|
||||
airingAt
|
||||
timeUntilAiring
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
[HomeTitleFragment],
|
||||
);
|
||||
|
||||
export const NextSeasonPopularQuery = graphql(
|
||||
`
|
||||
query NextSeasonPopular(
|
||||
$nextSeason: MediaSeason
|
||||
$nextYear: Int
|
||||
$limit: Int!
|
||||
) {
|
||||
Page(page: 1, perPage: $limit) {
|
||||
media(
|
||||
season: $nextSeason
|
||||
seasonYear: $nextYear
|
||||
@@ -253,26 +267,35 @@ export const BrowsePopularQuery = graphql(
|
||||
[HomeTitleFragment],
|
||||
);
|
||||
|
||||
export const NextSeasonPopularQuery = graphql(
|
||||
export const GetWatchingTitlesQuery = graphql(
|
||||
`
|
||||
query NextSeasonPopular(
|
||||
$nextSeason: MediaSeason
|
||||
$nextYear: Int
|
||||
$limit: Int!
|
||||
query GetWatchingTitles(
|
||||
$userName: String!
|
||||
$page: Int!
|
||||
$statusFilters: [MediaListStatus!]
|
||||
) {
|
||||
Page(page: $page, perPage: $limit) {
|
||||
media(
|
||||
season: $nextSeason
|
||||
seasonYear: $nextYear
|
||||
sort: POPULARITY_DESC
|
||||
Page(page: $page, perPage: 50) {
|
||||
mediaList(
|
||||
userName: $userName
|
||||
type: ANIME
|
||||
isAdult: false
|
||||
sort: UPDATED_TIME_DESC
|
||||
status_in: $statusFilters
|
||||
) {
|
||||
...HomeTitle
|
||||
media {
|
||||
...Media
|
||||
mediaListEntry {
|
||||
updatedAt
|
||||
}
|
||||
}
|
||||
}
|
||||
pageInfo {
|
||||
currentPage
|
||||
hasNextPage
|
||||
perPage
|
||||
total
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
[HomeTitleFragment],
|
||||
[MediaFragment],
|
||||
);
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
import { Duration, type DurationLike } from "luxon";
|
||||
|
||||
interface CalculateExponentialBackoffOptions {
|
||||
attempt: number;
|
||||
baseMin?: DurationLike;
|
||||
absCap?: DurationLike;
|
||||
fuzzFactor?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a backoff time where both the Minimum floor and Maximum ceiling
|
||||
* are "fuzzed" with jitter to prevent clustering at the edges.
|
||||
*
|
||||
* @param attempt - The current retry attempt (0-indexed).
|
||||
* @param baseMin - The nominal minimum wait time (default: 1s).
|
||||
* @param absCap - The absolute maximum wait time (default: 60s).
|
||||
* @param fuzzFactor - How much to wobble the edges (0.1 = +/- 10%).
|
||||
*
|
||||
* @returns A random duration between the nominal minimum and maximum, in seconds.
|
||||
*/
|
||||
export function calculateExponentialBackoff({
|
||||
attempt,
|
||||
baseMin: baseMinDuration = Duration.fromObject({ minutes: 1 }),
|
||||
absCap: absCapDuration = Duration.fromObject({ hours: 1 }),
|
||||
fuzzFactor = 0.2,
|
||||
}: CalculateExponentialBackoffOptions): number {
|
||||
const baseMin = Duration.fromDurationLike(baseMinDuration).as("seconds");
|
||||
const absCap = Duration.fromDurationLike(absCapDuration).as("seconds");
|
||||
|
||||
// 1. Calculate nominal boundaries
|
||||
// Example: If baseMin is 1s, the nominal boundaries are 1s, 2s, 4s, 8s... (The 'ceiling' grows exponentially)
|
||||
const nominalMin = baseMin;
|
||||
const nominalCeiling = Math.min(baseMin * Math.pow(2, attempt), absCap);
|
||||
|
||||
// 2. Fuzz the Min (The Floor)
|
||||
// Example: If min is 1s and fuzz is 0.2, the floor becomes random between 0.8s and 1.2s
|
||||
const minFuzz = nominalMin * fuzzFactor;
|
||||
const fuzzedMin = nominalMin + (Math.random() * 2 * minFuzz - minFuzz);
|
||||
|
||||
// 3. Fuzz the Max (The Ceiling)
|
||||
// Example: If ceiling is 4s (and fuzz is 0.2), it becomes random between 3.2s and 4.8s
|
||||
const maxFuzz = nominalCeiling * fuzzFactor;
|
||||
const fuzzedCeiling =
|
||||
nominalCeiling + (Math.random() * 2 * maxFuzz - maxFuzz);
|
||||
|
||||
// Safety: Ensure we don't return a negative number or cross boundaries weirdly
|
||||
// (e.g. if fuzz makes min > max, we swap or clamp)
|
||||
const safeMin = Math.max(0, fuzzedMin);
|
||||
const safeMax = Math.max(safeMin, fuzzedCeiling);
|
||||
|
||||
// 4. Return random value in the new fuzzy range
|
||||
return safeMin + Math.random() * (safeMax - safeMin);
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
|
||||
import { Case, changeStringCase } from "./changeStringCase";
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
|
||||
import { fetchFromMultipleSources } from "./fetchFromMultipleSources";
|
||||
|
||||
describe("fetchFromMultipleSources", () => {
|
||||
it("no promises, throws exception", async () => {
|
||||
await expect(fetchFromMultipleSources([])).rejects.toThrow(
|
||||
it("no promises, throws exception", () => {
|
||||
expect(() => fetchFromMultipleSources([])).toThrow(
|
||||
"fetchPromises cannot be empty",
|
||||
);
|
||||
});
|
||||
@@ -30,7 +30,7 @@ describe("fetchFromMultipleSources", () => {
|
||||
() => Promise.resolve(3),
|
||||
]);
|
||||
|
||||
expect(errorOccurred).toBe(false);
|
||||
expect(errorOccurred).toBeFalse();
|
||||
});
|
||||
|
||||
it("has promises that all throw, returns null", async () => {
|
||||
@@ -48,7 +48,7 @@ describe("fetchFromMultipleSources", () => {
|
||||
() => Promise.reject(new Error("error")),
|
||||
]);
|
||||
|
||||
expect(errorOccurred).toBe(true);
|
||||
expect(errorOccurred).toBeTrue();
|
||||
});
|
||||
|
||||
it("has promises but cache has value, returns cached value", async () => {
|
||||
@@ -80,7 +80,7 @@ describe("fetchFromMultipleSources", () => {
|
||||
},
|
||||
);
|
||||
|
||||
expect(errorOccurred).toBe(false);
|
||||
expect(errorOccurred).toBeFalse();
|
||||
});
|
||||
|
||||
it("has promises, no cached value, no valid response, should not save in cache", async () => {
|
||||
|
||||
@@ -2,14 +2,13 @@ import { env as cloudflareEnv } from "cloudflare:workers";
|
||||
import mapKeys from "lodash.mapkeys";
|
||||
|
||||
import { Case, changeStringCase } from "../changeStringCase";
|
||||
import { readEnvVariable } from "../readEnvVariable";
|
||||
|
||||
export function getAdminSdkCredentials(
|
||||
env: Cloudflare.Env = cloudflareEnv,
|
||||
): AdminSdkCredentials {
|
||||
export function getAdminSdkCredentials(env: Cloudflare.Env = cloudflareEnv) {
|
||||
return mapKeys(
|
||||
JSON.parse(env.ADMIN_SDK_JSON) as AdminSdkCredentials,
|
||||
readEnvVariable<AdminSdkCredentials>("ADMIN_SDK_JSON", env),
|
||||
(_, key) => changeStringCase(key, Case.snake_case, Case.camelCase),
|
||||
) satisfies AdminSdkCredentials;
|
||||
) as unknown as AdminSdkCredentials;
|
||||
}
|
||||
|
||||
export interface AdminSdkCredentials {
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
|
||||
import { server } from "~/mocks";
|
||||
|
||||
import type { AdminSdkCredentials } from "./getAdminSdkCredentials";
|
||||
import { verifyFcmToken } from "./verifyFcmToken";
|
||||
|
||||
server.listen();
|
||||
|
||||
const FAKE_ADMIN_SDK_JSON: AdminSdkCredentials = {
|
||||
type: "service_account",
|
||||
@@ -18,87 +23,29 @@ const FAKE_ADMIN_SDK_JSON: AdminSdkCredentials = {
|
||||
};
|
||||
|
||||
describe("verifyFcmToken", () => {
|
||||
const fcmToken = "test-token";
|
||||
let verifyFcmToken: typeof import("~/libs/gcloud/verifyFcmToken").verifyFcmToken;
|
||||
let sendFcmMessage: any;
|
||||
// it("valid token, returns true", async () => {
|
||||
// const token =
|
||||
// "7v8sy43aq0re4r8xe7rmr0cn1fsmh6phehnfla2pa73z899zmhyarivmkt4sj6pyv0py43u6p2sim6wz2vg9ypjp9rug1keoth7f6ll3gdvas4q020u3ah51r6bjgn51j6bd92ztmtof3ljpcm8q31njvndy65enm68";
|
||||
// const res = await verifyFcmToken(token, FAKE_ADMIN_SDK_JSON);
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.resetModules();
|
||||
vi.doMock("~/libs/gcloud/getGoogleAuthToken", () => ({
|
||||
getGoogleAuthToken: vi.fn().mockResolvedValue("fake-token"),
|
||||
}));
|
||||
vi.doMock("~/libs/gcloud/sendFcmMessage", () => ({
|
||||
sendFcmMessage: vi.fn(),
|
||||
}));
|
||||
// expect(res).toBeTrue();
|
||||
// });
|
||||
|
||||
// Import the module under test AFTER mocking dependencies
|
||||
const verifyModule = await import("~/libs/gcloud/verifyFcmToken");
|
||||
verifyFcmToken = verifyModule.verifyFcmToken;
|
||||
it("invalid token, returns false", async () => {
|
||||
const token = "abc123";
|
||||
const res = await verifyFcmToken(token, FAKE_ADMIN_SDK_JSON);
|
||||
|
||||
const mockModule = await import("~/libs/gcloud/sendFcmMessage");
|
||||
sendFcmMessage = mockModule.sendFcmMessage;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.doUnmock("~/libs/gcloud/sendFcmMessage");
|
||||
vi.doUnmock("~/libs/gcloud/getGoogleAuthToken");
|
||||
});
|
||||
|
||||
it("returns true for valid token", async () => {
|
||||
sendFcmMessage.mockResolvedValue({
|
||||
name: "projects/test-26g38/messages/fake-message-id",
|
||||
});
|
||||
|
||||
const result = await verifyFcmToken(fcmToken, FAKE_ADMIN_SDK_JSON);
|
||||
|
||||
expect(result).toBe(true);
|
||||
// Since we are mocking the module, we can check if it was called
|
||||
expect(sendFcmMessage).toHaveBeenCalledWith(
|
||||
FAKE_ADMIN_SDK_JSON,
|
||||
{ name: "token_verification", token: fcmToken },
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
it("returns false for invalid token (400)", async () => {
|
||||
sendFcmMessage.mockResolvedValue({
|
||||
error: {
|
||||
code: 400,
|
||||
message: "The registration token is not a valid FCM registration token",
|
||||
status: "INVALID_ARGUMENT",
|
||||
details: [],
|
||||
},
|
||||
});
|
||||
|
||||
const result = await verifyFcmToken("invalid-token", FAKE_ADMIN_SDK_JSON);
|
||||
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false for not found token (404)", async () => {
|
||||
sendFcmMessage.mockResolvedValue({
|
||||
error: {
|
||||
code: 404,
|
||||
message: "Task not found",
|
||||
status: "NOT_FOUND",
|
||||
details: [],
|
||||
},
|
||||
});
|
||||
|
||||
const result = await verifyFcmToken("not-found-token", FAKE_ADMIN_SDK_JSON);
|
||||
|
||||
expect(result).toBe(false);
|
||||
expect(res).toBeFalse();
|
||||
});
|
||||
|
||||
it("invalid ADMIN_SDK_JSON, returns false", async () => {
|
||||
// Simulate error that would occur in sendFcmMessage (e.g. auth failure inside it)
|
||||
sendFcmMessage.mockRejectedValue(new Error("No email provided"));
|
||||
|
||||
const res = await verifyFcmToken("token", {
|
||||
const token =
|
||||
"7v8sy43aq0re4r8xe7rmr0cn1fsmh6phehnfla2pa73z899zmhyarivmkt4sj6pyv0py43u6p2sim6wz2vg9ypjp9rug1keoth7f6ll3gdvas4q020u3ah51r6bjgn51j6bd92ztmtof3ljpcm8q31njvndy65enm68";
|
||||
const res = await verifyFcmToken(token, {
|
||||
...FAKE_ADMIN_SDK_JSON,
|
||||
clientEmail: "",
|
||||
});
|
||||
|
||||
expect(res).toBe(false);
|
||||
expect(res).toBeFalse();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { DateTime } from "luxon";
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
import { describe, expect, it } from "bun:test";
|
||||
|
||||
import { getCurrentAndNextSeason } from "./getCurrentAndNextSeason";
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
|
||||
import { lazy } from "./lazy";
|
||||
|
||||
@@ -16,7 +16,7 @@ describe("lazy", () => {
|
||||
return "value";
|
||||
});
|
||||
|
||||
expect(setValue).toBe(false);
|
||||
expect(setValue).toBeFalse();
|
||||
});
|
||||
|
||||
it("lazy function called if get is called", () => {
|
||||
@@ -26,7 +26,7 @@ describe("lazy", () => {
|
||||
return "value";
|
||||
}).get();
|
||||
|
||||
expect(setValue).toBe(true);
|
||||
expect(setValue).toBeTrue();
|
||||
});
|
||||
|
||||
it("lazy function called only once if get is called multiple times", () => {
|
||||
|
||||
@@ -1,116 +0,0 @@
|
||||
import { DateTime } from "luxon";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { maybeScheduleNextAiringEpisode } from "./maybeScheduleNextAiringEpisode";
|
||||
|
||||
vi.mock("~/models/unreleasedTitles", () => ({
|
||||
addUnreleasedTitle: vi.fn(),
|
||||
removeUnreleasedTitle: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("./anilist/getNextEpisodeAiringAt", () => ({
|
||||
getNextEpisodeTimeUntilAiring: vi.fn(),
|
||||
}));
|
||||
describe("maybeScheduleNextAiringEpisode", () => {
|
||||
let addUnreleasedTitle: any;
|
||||
let removeUnreleasedTitle: any;
|
||||
let getNextEpisodeTimeUntilAiring: any;
|
||||
let queueTask: any;
|
||||
let maybeScheduleNextAiringEpisode: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.resetModules();
|
||||
|
||||
vi.doMock("~/models/unreleasedTitles", () => ({
|
||||
addUnreleasedTitle: vi.fn(),
|
||||
removeUnreleasedTitle: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.doMock("./anilist/getNextEpisodeAiringAt", () => ({
|
||||
getNextEpisodeTimeUntilAiring: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.doMock("./tasks/queueTask", () => ({
|
||||
queueTask: vi.fn(),
|
||||
}));
|
||||
|
||||
maybeScheduleNextAiringEpisode = (
|
||||
await import("./maybeScheduleNextAiringEpisode")
|
||||
).maybeScheduleNextAiringEpisode;
|
||||
|
||||
addUnreleasedTitle = (await import("~/models/unreleasedTitles"))
|
||||
.addUnreleasedTitle;
|
||||
removeUnreleasedTitle = (await import("~/models/unreleasedTitles"))
|
||||
.removeUnreleasedTitle;
|
||||
getNextEpisodeTimeUntilAiring = (
|
||||
await import("./anilist/getNextEpisodeAiringAt")
|
||||
).getNextEpisodeTimeUntilAiring;
|
||||
queueTask = (await import("./tasks/queueTask")).queueTask;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should add to unreleased titles if status is NOT_YET_RELEASED and no next airing", async () => {
|
||||
vi.mocked(getNextEpisodeTimeUntilAiring).mockResolvedValue({
|
||||
nextAiring: null,
|
||||
status: "NOT_YET_RELEASED",
|
||||
});
|
||||
|
||||
await maybeScheduleNextAiringEpisode(1);
|
||||
|
||||
expect(addUnreleasedTitle).toHaveBeenCalledWith(1);
|
||||
expect(queueTask).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should do nothing if status is RELEASING but no next airing (e.g. hiatus)", async () => {
|
||||
vi.mocked(getNextEpisodeTimeUntilAiring).mockResolvedValue({
|
||||
nextAiring: null,
|
||||
status: "RELEASING",
|
||||
});
|
||||
|
||||
await maybeScheduleNextAiringEpisode(2);
|
||||
|
||||
expect(addUnreleasedTitle).not.toHaveBeenCalled();
|
||||
expect(queueTask).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should do nothing if next airing is more than 30 days away", async () => {
|
||||
const farFuture = DateTime.now().plus({ days: 31 }).toSeconds();
|
||||
vi.mocked(getNextEpisodeTimeUntilAiring).mockResolvedValue({
|
||||
nextAiring: { airingAt: farFuture, episode: 2 },
|
||||
status: "RELEASING",
|
||||
});
|
||||
|
||||
await maybeScheduleNextAiringEpisode(3);
|
||||
|
||||
expect(addUnreleasedTitle).not.toHaveBeenCalled();
|
||||
expect(queueTask).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should schedule task and remove from unreleased if next airing is soon", async () => {
|
||||
const nearFuture = Math.floor(DateTime.now().plus({ days: 1 }).toSeconds());
|
||||
vi.mocked(getNextEpisodeTimeUntilAiring).mockResolvedValue({
|
||||
nextAiring: { airingAt: nearFuture, episode: 5 },
|
||||
status: "RELEASING",
|
||||
});
|
||||
|
||||
await maybeScheduleNextAiringEpisode(4);
|
||||
|
||||
expect(queueTask).toHaveBeenCalledWith(
|
||||
"NEW_EPISODE",
|
||||
{ aniListId: 4, episodeNumber: 5 },
|
||||
{ scheduleConfig: { epochTime: nearFuture } },
|
||||
);
|
||||
expect(removeUnreleasedTitle).toHaveBeenCalledWith(4);
|
||||
expect(addUnreleasedTitle).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should add to unreleased if next airing is null even with RELEASING status? No code says only NOT_YET_RELEASED", async () => {
|
||||
// Code: if (status === "NOT_YET_RELEASED") await addUnreleasedTitle(aniListId);
|
||||
// So if RELEASING and null, it does nothing.
|
||||
// Verified in second test case.
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -1,10 +1,10 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
|
||||
import { PromiseTimedOutError, promiseTimeout } from "./promiseTimeout";
|
||||
|
||||
describe("promiseTimeout", () => {
|
||||
it("promise resolves within timeout, returns value", async () => {
|
||||
await expect(
|
||||
it("promise resolves within timeout, returns value", () => {
|
||||
expect(
|
||||
promiseTimeout(
|
||||
wait(1).then(() => 2),
|
||||
10,
|
||||
@@ -12,8 +12,8 @@ describe("promiseTimeout", () => {
|
||||
).resolves.toBe(2);
|
||||
});
|
||||
|
||||
it("promise does not resolve within timeout, throws PromiseTimedOutError", async () => {
|
||||
await expect(
|
||||
it("promise does not resolve within timeout, throws PromiseTimedOutError", () => {
|
||||
expect(
|
||||
promiseTimeout(
|
||||
wait(2).then(() => 2),
|
||||
1,
|
||||
|
||||
35
src/libs/readEnvVariable.spec.ts
Normal file
35
src/libs/readEnvVariable.spec.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { describe, expect, it } from "bun:test";
|
||||
|
||||
import { readEnvVariable } from "./readEnvVariable";
|
||||
|
||||
describe("readEnvVariable", () => {
|
||||
describe("env & variable defined", () => {
|
||||
it("returns boolean", () => {
|
||||
expect(
|
||||
readEnvVariable<boolean>("ENABLE_ANIFY", { ENABLE_ANIFY: "false" }),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("returns string", () => {
|
||||
expect(
|
||||
readEnvVariable<string>("QSTASH_TOKEN", {
|
||||
QSTASH_TOKEN: "ehf73g8gyriuvnieojwicbg83hc",
|
||||
}),
|
||||
).toBe("ehf73g8gyriuvnieojwicbg83hc");
|
||||
});
|
||||
|
||||
it("returns number", () => {
|
||||
expect(
|
||||
readEnvVariable<number>("NUM_RETRIES", { NUM_RETRIES: "123" }),
|
||||
).toBe(123);
|
||||
});
|
||||
});
|
||||
|
||||
it("env defined but variable not defined, returns default value", () => {
|
||||
expect(readEnvVariable<boolean>("ENABLE_ANIFY", { FOO: "bar" })).toBe(true);
|
||||
});
|
||||
|
||||
it("env not defined, returns default value", () => {
|
||||
expect(readEnvVariable<boolean>("ENABLE_ANIFY", undefined)).toBe(true);
|
||||
});
|
||||
});
|
||||
22
src/libs/readEnvVariable.ts
Normal file
22
src/libs/readEnvVariable.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { env as cloudflareEnv } from "cloudflare:workers";
|
||||
import type { Bindings } from "hono/types";
|
||||
|
||||
type EnvVariable = keyof Cloudflare.Env;
|
||||
const defaultValues: Record<EnvVariable, any> = {
|
||||
ENABLE_ANIFY: true,
|
||||
};
|
||||
|
||||
export function readEnvVariable<T>(
|
||||
envVariable: EnvVariable,
|
||||
env: Bindings | undefined = cloudflareEnv,
|
||||
): T {
|
||||
try {
|
||||
return JSON.parse(env?.[envVariable] ?? null) ?? defaultValues[envVariable];
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
return env![envVariable];
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
|
||||
import { sortByProperty } from "./sortByProperty";
|
||||
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import { beforeEach, describe, expect, it } from "vitest";
|
||||
import { DateTime } from "luxon";
|
||||
|
||||
import { beforeEach, describe, expect, it, mock } from "bun:test";
|
||||
|
||||
import type { DelayedTaskMetadata } from "./delayedTask";
|
||||
import {
|
||||
|
||||
@@ -1,158 +1,194 @@
|
||||
import { env } from "cloudflare:test";
|
||||
import { DateTime } from "luxon";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { beforeEach, describe, expect, it, mock } from "bun:test";
|
||||
|
||||
import { getTestEnv } from "../test/getTestEnv";
|
||||
import { processDelayedTasks } from "./processDelayedTasks";
|
||||
|
||||
describe("processDelayedTasks", () => {
|
||||
beforeEach(async () => {
|
||||
const tasksToDelete = await env.DELAYED_TASKS.list({
|
||||
prefix: "delayed-task:",
|
||||
});
|
||||
console.log(`Found ${tasksToDelete.keys.length} tasks to delete`);
|
||||
for (const task of tasksToDelete.keys) {
|
||||
await env.DELAYED_TASKS.delete(task.name);
|
||||
}
|
||||
let mockEnv: Cloudflare.Env;
|
||||
let mockCtx: ExecutionContext;
|
||||
let kvGetSpy: ReturnType<typeof mock>;
|
||||
let kvDeleteSpy: ReturnType<typeof mock>;
|
||||
let kvPutSpy: ReturnType<typeof mock>;
|
||||
let queueSendSpy: ReturnType<typeof mock>;
|
||||
|
||||
beforeEach(() => {
|
||||
kvGetSpy = mock(() => Promise.resolve(null));
|
||||
kvDeleteSpy = mock(() => Promise.resolve());
|
||||
kvPutSpy = mock(() => Promise.resolve());
|
||||
queueSendSpy = mock(() => Promise.resolve());
|
||||
|
||||
mockEnv = {
|
||||
DELAYED_TASKS: {
|
||||
get: kvGetSpy,
|
||||
delete: kvDeleteSpy,
|
||||
put: kvPutSpy,
|
||||
list: mock(() => Promise.resolve({ keys: [], list_complete: true })),
|
||||
getWithMetadata: mock(() =>
|
||||
Promise.resolve({ value: null, metadata: null }),
|
||||
),
|
||||
} as any,
|
||||
NEW_EPISODE: {
|
||||
send: queueSendSpy,
|
||||
} as any,
|
||||
ANILIST_UPDATES: {
|
||||
send: mock(() => Promise.resolve()),
|
||||
} as any,
|
||||
} as any;
|
||||
|
||||
mockCtx = {
|
||||
waitUntil: mock(() => {}),
|
||||
passThroughOnException: mock(() => {}),
|
||||
} as any;
|
||||
});
|
||||
|
||||
it("handles empty KV namespace", async () => {
|
||||
await processDelayedTasks(env);
|
||||
await processDelayedTasks(mockEnv, mockCtx);
|
||||
|
||||
await expect(
|
||||
env.DELAYED_TASKS.list({ prefix: "delayed-task:" }).then(
|
||||
(result) => result.keys,
|
||||
),
|
||||
).resolves.toHaveLength(0);
|
||||
expect(kvDeleteSpy).not.toHaveBeenCalled();
|
||||
expect(queueSendSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("queues tasks within 9 hours of scheduled time", async () => {
|
||||
const now = DateTime.now();
|
||||
const scheduledTime = now.plus({ hours: 6 }).toSeconds();
|
||||
it("queues tasks within 12 hours of scheduled time", async () => {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const scheduledTime = now + 6 * 3600; // 6 hours from now
|
||||
|
||||
const taskMetadata = {
|
||||
queueName: "NEW_EPISODE",
|
||||
body: { aniListId: 123, episodeNumber: 1 },
|
||||
headers: { "Content-Type": "application/json" },
|
||||
scheduledEpochTime: scheduledTime,
|
||||
taskId: "task-1",
|
||||
createdAt: now.minus({ hours: 18 }).toSeconds(),
|
||||
createdAt: now - 18 * 3600,
|
||||
retryCount: 0,
|
||||
};
|
||||
await env.DELAYED_TASKS.put(
|
||||
`delayed-task:${scheduledTime}:task-1`,
|
||||
JSON.stringify(taskMetadata),
|
||||
|
||||
mockEnv.DELAYED_TASKS.list = mock(() =>
|
||||
Promise.resolve({
|
||||
keys: [{ name: `delayed-task:${scheduledTime}:task-1` }],
|
||||
list_complete: true,
|
||||
}),
|
||||
);
|
||||
|
||||
await processDelayedTasks(env);
|
||||
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
|
||||
|
||||
await expect(
|
||||
env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-1`),
|
||||
).resolves.toBeNull();
|
||||
await processDelayedTasks(mockEnv, mockCtx);
|
||||
|
||||
expect(queueSendSpy).toHaveBeenCalledTimes(1);
|
||||
expect(kvDeleteSpy).toHaveBeenCalledTimes(1);
|
||||
expect(kvDeleteSpy).toHaveBeenCalledWith(
|
||||
`delayed-task:${scheduledTime}:task-1`,
|
||||
);
|
||||
});
|
||||
|
||||
it("does not queue tasks beyond 9 hours", async () => {
|
||||
const now = DateTime.now();
|
||||
const scheduledTime = now.plus({ hours: 24 }).toSeconds();
|
||||
it("does not queue tasks beyond 12 hours", async () => {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const scheduledTime = now + 24 * 3600; // 24 hours from now
|
||||
|
||||
const taskMetadata = {
|
||||
queueName: "NEW_EPISODE",
|
||||
body: { aniListId: 456, episodeNumber: 2 },
|
||||
headers: { "Content-Type": "application/json" },
|
||||
scheduledEpochTime: scheduledTime,
|
||||
taskId: "task-2",
|
||||
createdAt: now.toSeconds(),
|
||||
createdAt: now,
|
||||
retryCount: 0,
|
||||
};
|
||||
await env.DELAYED_TASKS.put(
|
||||
`delayed-task:${scheduledTime}:task-2`,
|
||||
JSON.stringify(taskMetadata),
|
||||
|
||||
mockEnv.DELAYED_TASKS.list = mock(() =>
|
||||
Promise.resolve({
|
||||
keys: [{ name: `delayed-task:${scheduledTime}:task-2` }],
|
||||
list_complete: true,
|
||||
}),
|
||||
);
|
||||
|
||||
await processDelayedTasks(env);
|
||||
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
|
||||
|
||||
await expect(
|
||||
env.DELAYED_TASKS.get(`delayed-task:${scheduledTime}:task-2`),
|
||||
).resolves.toBeTruthy();
|
||||
await processDelayedTasks(mockEnv, mockCtx);
|
||||
|
||||
expect(queueSendSpy).not.toHaveBeenCalled();
|
||||
expect(kvDeleteSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("increments retry count on queue failure", async () => {
|
||||
const now = DateTime.now();
|
||||
const scheduledTime = now.plus({ hours: 1 }).toSeconds();
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const scheduledTime = now + 1 * 3600; // 1 hour from now
|
||||
|
||||
const taskMetadata = {
|
||||
queueName: "NEW_EPISODE",
|
||||
body: { aniListId: 789, episodeNumber: 3 },
|
||||
headers: { "Content-Type": "application/json" },
|
||||
scheduledEpochTime: scheduledTime,
|
||||
taskId: "task-3",
|
||||
createdAt: now.minus({ hours: 23 }).toSeconds(),
|
||||
createdAt: now - 23 * 3600,
|
||||
retryCount: 0,
|
||||
};
|
||||
const mockEnv = getTestEnv({
|
||||
NEW_EPISODE: {
|
||||
send: vi.fn().mockRejectedValue(new Error("Queue error")),
|
||||
sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
|
||||
},
|
||||
});
|
||||
await mockEnv.DELAYED_TASKS.put(
|
||||
`delayed-task:${scheduledTime}:task-3`,
|
||||
JSON.stringify(taskMetadata),
|
||||
|
||||
mockEnv.DELAYED_TASKS.list = mock(() =>
|
||||
Promise.resolve({
|
||||
keys: [{ name: `delayed-task:${scheduledTime}:task-3` }],
|
||||
list_complete: true,
|
||||
}),
|
||||
);
|
||||
|
||||
await processDelayedTasks(mockEnv);
|
||||
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
|
||||
queueSendSpy.mockRejectedValue(new Error("Queue error"));
|
||||
|
||||
const updatedMetadata = JSON.parse(
|
||||
(await mockEnv.DELAYED_TASKS.get(
|
||||
`delayed-task:${scheduledTime}:task-3`,
|
||||
))!,
|
||||
);
|
||||
await processDelayedTasks(mockEnv, mockCtx);
|
||||
|
||||
expect(kvPutSpy).toHaveBeenCalledTimes(1);
|
||||
const updatedMetadata = JSON.parse(kvPutSpy.mock.calls[0][1]);
|
||||
expect(updatedMetadata.retryCount).toBe(1);
|
||||
expect(kvDeleteSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("logs alert after 3 failed attempts", async () => {
|
||||
const consoleErrorSpy = vi.fn(() => {});
|
||||
const consoleErrorSpy = mock(() => {});
|
||||
const originalConsoleError = console.error;
|
||||
console.error = consoleErrorSpy as any;
|
||||
const now = DateTime.now();
|
||||
const scheduledTime = now.plus({ hours: 1 }).toSeconds();
|
||||
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const scheduledTime = now + 1 * 3600;
|
||||
|
||||
const taskMetadata = {
|
||||
queueName: "NEW_EPISODE",
|
||||
body: { aniListId: 789, episodeNumber: 4 },
|
||||
body: { aniListId: 999, episodeNumber: 4 },
|
||||
headers: { "Content-Type": "application/json" },
|
||||
scheduledEpochTime: scheduledTime,
|
||||
taskId: "task-4",
|
||||
createdAt: now.minus({ hours: 23 }).toSeconds(),
|
||||
retryCount: 2,
|
||||
createdAt: now - 23 * 3600,
|
||||
retryCount: 2, // Will become 3 after this failure
|
||||
};
|
||||
const mockEnv = getTestEnv({
|
||||
NEW_EPISODE: {
|
||||
send: vi.fn().mockRejectedValue(new Error("Queue error")),
|
||||
sendBatch: vi.fn().mockRejectedValue(new Error("Queue error")),
|
||||
},
|
||||
});
|
||||
await mockEnv.DELAYED_TASKS.put(
|
||||
`delayed-task:${scheduledTime}:task-4`,
|
||||
JSON.stringify(taskMetadata),
|
||||
|
||||
mockEnv.DELAYED_TASKS.list = mock(() =>
|
||||
Promise.resolve({
|
||||
keys: [{ name: `delayed-task:${scheduledTime}:task-4` }],
|
||||
list_complete: true,
|
||||
}),
|
||||
);
|
||||
|
||||
await processDelayedTasks(mockEnv);
|
||||
kvGetSpy.mockReturnValue(Promise.resolve(JSON.stringify(taskMetadata)));
|
||||
queueSendSpy.mockRejectedValue(new Error("Queue error"));
|
||||
|
||||
await processDelayedTasks(mockEnv, mockCtx);
|
||||
|
||||
// Check that alert was logged
|
||||
const alertCalls = consoleErrorSpy.mock.calls.filter((call: any) =>
|
||||
call[0]?.includes("🚨 ALERT"),
|
||||
);
|
||||
expect(alertCalls.length).toBeGreaterThan(0);
|
||||
|
||||
console.error = originalConsoleError;
|
||||
});
|
||||
|
||||
it("handles multiple tasks in single cron run", async () => {
|
||||
const now = DateTime.now();
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
|
||||
const task1Metadata = {
|
||||
queueName: "NEW_EPISODE",
|
||||
body: { aniListId: 100, episodeNumber: 1 },
|
||||
headers: { "Content-Type": "application/json" },
|
||||
scheduledEpochTime: now.plus({ hours: 2 }).toSeconds(),
|
||||
scheduledEpochTime: now + 2 * 3600,
|
||||
taskId: "task-1",
|
||||
createdAt: now.minus({ hours: 20 }).toSeconds(),
|
||||
createdAt: now - 20 * 3600,
|
||||
retryCount: 0,
|
||||
};
|
||||
|
||||
@@ -160,53 +196,45 @@ describe("processDelayedTasks", () => {
|
||||
queueName: "NEW_EPISODE",
|
||||
body: { aniListId: 200, episodeNumber: 2 },
|
||||
headers: { "Content-Type": "application/json" },
|
||||
scheduledEpochTime: now.plus({ hours: 5 }).toSeconds(),
|
||||
scheduledEpochTime: now + 5 * 3600,
|
||||
taskId: "task-2",
|
||||
createdAt: now.minus({ hours: 19 }).toSeconds(),
|
||||
createdAt: now - 19 * 3600,
|
||||
retryCount: 0,
|
||||
};
|
||||
await env.DELAYED_TASKS.put(
|
||||
`delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
|
||||
JSON.stringify(task1Metadata),
|
||||
);
|
||||
await env.DELAYED_TASKS.put(
|
||||
`delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
|
||||
JSON.stringify(task2Metadata),
|
||||
|
||||
mockEnv.DELAYED_TASKS.list = mock(() =>
|
||||
Promise.resolve({
|
||||
keys: [
|
||||
{ name: `delayed-task:${task1Metadata.scheduledEpochTime}:task-1` },
|
||||
{ name: `delayed-task:${task2Metadata.scheduledEpochTime}:task-2` },
|
||||
],
|
||||
list_complete: true,
|
||||
}),
|
||||
);
|
||||
|
||||
await processDelayedTasks(env);
|
||||
kvGetSpy
|
||||
.mockReturnValueOnce(Promise.resolve(JSON.stringify(task1Metadata)))
|
||||
.mockReturnValueOnce(Promise.resolve(JSON.stringify(task2Metadata)));
|
||||
|
||||
await expect(
|
||||
env.DELAYED_TASKS.get(
|
||||
`delayed-task:${task1Metadata.scheduledEpochTime}:task-1`,
|
||||
),
|
||||
).resolves.toBeNull();
|
||||
await expect(
|
||||
env.DELAYED_TASKS.get(
|
||||
`delayed-task:${task2Metadata.scheduledEpochTime}:task-2`,
|
||||
),
|
||||
).resolves.toBeNull();
|
||||
await processDelayedTasks(mockEnv, mockCtx);
|
||||
|
||||
expect(queueSendSpy).toHaveBeenCalledTimes(2);
|
||||
expect(kvDeleteSpy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it("skips tasks with null values in KV", async () => {
|
||||
const queueSendSpy = vi.fn().mockResolvedValue(undefined);
|
||||
const mockEnv = getTestEnv({
|
||||
NEW_EPISODE: {
|
||||
send: queueSendSpy,
|
||||
sendBatch: queueSendSpy,
|
||||
},
|
||||
ANILIST_UPDATES: {
|
||||
send: queueSendSpy,
|
||||
sendBatch: queueSendSpy,
|
||||
},
|
||||
});
|
||||
await mockEnv.DELAYED_TASKS.put(`delayed-task:123:invalid`, null);
|
||||
mockEnv.DELAYED_TASKS.list = mock(() =>
|
||||
Promise.resolve({
|
||||
keys: [{ name: "delayed-task:123:invalid" }],
|
||||
list_complete: true,
|
||||
}),
|
||||
);
|
||||
|
||||
await processDelayedTasks(mockEnv);
|
||||
kvGetSpy.mockReturnValue(Promise.resolve(null));
|
||||
|
||||
await processDelayedTasks(mockEnv, mockCtx);
|
||||
|
||||
expect(queueSendSpy).not.toHaveBeenCalled();
|
||||
await expect(
|
||||
mockEnv.DELAYED_TASKS.get(`delayed-task:123:invalid`),
|
||||
).resolves.toBeNull();
|
||||
expect(kvDeleteSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,11 +2,15 @@ import { DateTime } from "luxon";
|
||||
|
||||
import type { DelayedTaskMetadata } from "./delayedTask";
|
||||
import { deserializeDelayedTask } from "./delayedTask";
|
||||
import { MAX_QUEUE_DELAY_SECONDS, queueTask } from "./queueTask";
|
||||
import { queueTask } from "./queueTask";
|
||||
|
||||
const MAX_DELAY_SECONDS = 12 * 60 * 60; // 43,200 seconds (12 hours)
|
||||
const RETRY_ALERT_THRESHOLD = 3;
|
||||
|
||||
export async function processDelayedTasks(env: Cloudflare.Env): Promise<void> {
|
||||
export async function processDelayedTasks(
|
||||
env: Cloudflare.Env,
|
||||
ctx: ExecutionContext,
|
||||
): Promise<void> {
|
||||
console.log("Starting delayed task processing cron job");
|
||||
|
||||
const kvNamespace = env.DELAYED_TASKS;
|
||||
@@ -27,7 +31,7 @@ export async function processDelayedTasks(env: Cloudflare.Env): Promise<void> {
|
||||
console.log(`Found ${keys.length} delayed tasks to check`);
|
||||
|
||||
const currentTime = Math.floor(Date.now() / 1000);
|
||||
const maxQueueTime = currentTime + MAX_QUEUE_DELAY_SECONDS;
|
||||
const twelveHoursFromNow = currentTime + MAX_DELAY_SECONDS;
|
||||
|
||||
let processedCount = 0;
|
||||
let queuedCount = 0;
|
||||
@@ -36,17 +40,16 @@ export async function processDelayedTasks(env: Cloudflare.Env): Promise<void> {
|
||||
for (const key of keys) {
|
||||
try {
|
||||
const value = await kvNamespace.get(key.name);
|
||||
if (!value || value == "null") {
|
||||
console.warn(`Task key ${key.name} has no value, removing`);
|
||||
await kvNamespace.delete(key.name);
|
||||
if (!value) {
|
||||
console.warn(`Task key ${key.name} has no value, skipping`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const metadata: DelayedTaskMetadata = deserializeDelayedTask(value);
|
||||
processedCount++;
|
||||
|
||||
// Check if task is ready to be queued (within 9 hours of scheduled time)
|
||||
if (metadata.scheduledEpochTime <= maxQueueTime) {
|
||||
// Check if task is ready to be queued (within 12 hours of scheduled time)
|
||||
if (metadata.scheduledEpochTime <= twelveHoursFromNow) {
|
||||
const remainingDelay = Math.max(
|
||||
0,
|
||||
metadata.scheduledEpochTime - currentTime,
|
||||
@@ -97,7 +100,7 @@ export async function processDelayedTasks(env: Cloudflare.Env): Promise<void> {
|
||||
}
|
||||
} else {
|
||||
const hoursUntilReady =
|
||||
(metadata.scheduledEpochTime - maxQueueTime) / 3600;
|
||||
(metadata.scheduledEpochTime - twelveHoursFromNow) / 3600;
|
||||
console.log(
|
||||
`Task ${metadata.taskId} not ready yet (${hoursUntilReady.toFixed(1)} hours until queueable)`,
|
||||
);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { beforeEach, describe, expect, it, mock, spyOn } from "bun:test";
|
||||
|
||||
import { queueTask } from "./queueTask";
|
||||
|
||||
@@ -6,20 +6,20 @@ describe("queueTask - delayed task handling", () => {
|
||||
const MAX_DELAY_SECONDS = 12 * 60 * 60; // 43,200 seconds
|
||||
|
||||
let mockEnv: Cloudflare.Env;
|
||||
let kvPutSpy: ReturnType<typeof vi.fn>;
|
||||
let queueSendSpy: ReturnType<typeof vi.fn>;
|
||||
let kvPutSpy: ReturnType<typeof mock>;
|
||||
let queueSendSpy: ReturnType<typeof mock>;
|
||||
|
||||
beforeEach(() => {
|
||||
kvPutSpy = vi.fn(() => Promise.resolve());
|
||||
queueSendSpy = vi.fn(() => Promise.resolve());
|
||||
kvPutSpy = mock(() => Promise.resolve());
|
||||
queueSendSpy = mock(() => Promise.resolve());
|
||||
|
||||
mockEnv = {
|
||||
DELAYED_TASKS: {
|
||||
put: kvPutSpy,
|
||||
get: vi.fn(() => Promise.resolve(null)),
|
||||
delete: vi.fn(() => Promise.resolve()),
|
||||
list: vi.fn(() => Promise.resolve({ keys: [], list_complete: true })),
|
||||
getWithMetadata: vi.fn(() =>
|
||||
get: mock(() => Promise.resolve(null)),
|
||||
delete: mock(() => Promise.resolve()),
|
||||
list: mock(() => Promise.resolve({ keys: [], list_complete: true })),
|
||||
getWithMetadata: mock(() =>
|
||||
Promise.resolve({ value: null, metadata: null }),
|
||||
),
|
||||
} as any,
|
||||
@@ -27,16 +27,16 @@ describe("queueTask - delayed task handling", () => {
|
||||
send: queueSendSpy,
|
||||
} as any,
|
||||
ANILIST_UPDATES: {
|
||||
send: vi.fn(() => Promise.resolve()),
|
||||
send: mock(() => Promise.resolve()),
|
||||
} as any,
|
||||
} as any;
|
||||
|
||||
// Mock crypto.randomUUID
|
||||
(globalThis as any).crypto = { randomUUID: vi.fn(() => "test-uuid-123") };
|
||||
globalThis.crypto.randomUUID = mock(() => "test-uuid-123");
|
||||
});
|
||||
|
||||
describe("tasks with delay <= 9 hours", () => {
|
||||
it("queues task directly when delay is less than 9 hours", async () => {
|
||||
describe("tasks with delay <= 12 hours", () => {
|
||||
it("queues task directly when delay is less than 12 hours", async () => {
|
||||
await queueTask(
|
||||
"NEW_EPISODE",
|
||||
{ aniListId: 123, episodeNumber: 1 },
|
||||
@@ -52,12 +52,12 @@ describe("queueTask - delayed task handling", () => {
|
||||
expect(kvPutSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("queues task directly when delay is exactly 9 hours", async () => {
|
||||
it("queues task directly when delay is exactly 12 hours", async () => {
|
||||
await queueTask(
|
||||
"NEW_EPISODE",
|
||||
{ aniListId: 456, episodeNumber: 2 },
|
||||
{
|
||||
scheduleConfig: { delay: { hours: 9 } },
|
||||
scheduleConfig: { delay: { hours: 12 } },
|
||||
env: mockEnv,
|
||||
},
|
||||
);
|
||||
|
||||
@@ -9,11 +9,9 @@ import type { QueueName } from "./queueName";
|
||||
|
||||
export type QueueBody = {
|
||||
ANILIST_UPDATES: {
|
||||
[AnilistUpdateType.UpdateWatchStatus]: {
|
||||
titleId: number;
|
||||
watchStatus: WatchStatus | null;
|
||||
aniListToken: string;
|
||||
};
|
||||
deviceId: string;
|
||||
watchStatus: WatchStatus | null;
|
||||
titleId: number;
|
||||
updateType: AnilistUpdateType;
|
||||
};
|
||||
NEW_EPISODE: { aniListId: number; episodeNumber: number };
|
||||
@@ -30,10 +28,6 @@ interface QueueTaskOptionalArgs {
|
||||
env?: Cloudflare.Env;
|
||||
}
|
||||
|
||||
export const MAX_QUEUE_DELAY_SECONDS = Duration.fromObject({ hours: 12 }).as(
|
||||
"seconds",
|
||||
);
|
||||
|
||||
export async function queueTask(
|
||||
queueName: QueueName,
|
||||
body: QueueBody[QueueName],
|
||||
@@ -46,14 +40,17 @@ export async function queueTask(
|
||||
req?.header(),
|
||||
);
|
||||
|
||||
const MAX_DELAY_SECONDS = 12 * 60 * 60; // 43,200 seconds (12 hours)
|
||||
|
||||
// If delay exceeds 12 hours, store in KV for later processing
|
||||
if (scheduleTime > MAX_QUEUE_DELAY_SECONDS) {
|
||||
if (scheduleTime > MAX_DELAY_SECONDS) {
|
||||
if (!env || !env.DELAYED_TASKS) {
|
||||
throw new Error("DELAYED_TASKS KV namespace not available");
|
||||
}
|
||||
|
||||
const { generateTaskKey, serializeDelayedTask } =
|
||||
await import("./delayedTask");
|
||||
const { generateTaskKey, serializeDelayedTask } = await import(
|
||||
"./delayedTask"
|
||||
);
|
||||
const taskId = crypto.randomUUID();
|
||||
const scheduledEpochTime = Math.floor(Date.now() / 1000) + scheduleTime;
|
||||
|
||||
@@ -132,9 +129,6 @@ function buildTask(
|
||||
scheduleTime = Duration.fromDurationLike(delay).as("second");
|
||||
}
|
||||
}
|
||||
const authorizationHeader = headers?.["X-Anilist-Token"]
|
||||
? { Authorization: `Bearer ${headers["X-Anilist-Token"]}` }
|
||||
: {};
|
||||
|
||||
switch (queueName) {
|
||||
case "ANILIST_UPDATES":
|
||||
@@ -143,8 +137,8 @@ function buildTask(
|
||||
body,
|
||||
scheduleTime,
|
||||
headers: {
|
||||
...authorizationHeader,
|
||||
"Content-Type": "application/json",
|
||||
"X-Anilist-Token": headers?.["X-Anilist-Token"],
|
||||
},
|
||||
};
|
||||
default:
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
vi.stubGlobal("fetch", vi.fn());
|
||||
|
||||
describe("removeTask", () => {
|
||||
let removeTask: any;
|
||||
let getAdminSdkCredentials: any;
|
||||
let getGoogleAuthToken: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.resetModules();
|
||||
vi.doMock("cloudflare:workers", () => ({ env: {} }));
|
||||
vi.doMock("../gcloud/getAdminSdkCredentials", () => ({
|
||||
getAdminSdkCredentials: vi.fn(),
|
||||
}));
|
||||
vi.doMock("../gcloud/getGoogleAuthToken", () => ({
|
||||
getGoogleAuthToken: vi.fn(),
|
||||
}));
|
||||
|
||||
removeTask = (await import("./removeTask")).removeTask;
|
||||
getAdminSdkCredentials = (await import("../gcloud/getAdminSdkCredentials"))
|
||||
.getAdminSdkCredentials;
|
||||
getGoogleAuthToken = (await import("../gcloud/getGoogleAuthToken"))
|
||||
.getGoogleAuthToken;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("should call Google Cloud Tasks API with correct parameters", async () => {
|
||||
const mockCredentials = { projectId: "test-project" };
|
||||
vi.mocked(getAdminSdkCredentials).mockReturnValue(mockCredentials);
|
||||
vi.mocked(getGoogleAuthToken).mockResolvedValue("test-token");
|
||||
vi.mocked(fetch).mockResolvedValue(new Response(""));
|
||||
|
||||
await removeTask("NEW_EPISODE", "task-123");
|
||||
|
||||
expect(fetch).toHaveBeenCalledWith(
|
||||
"https://content-cloudtasks.googleapis.com/v2/projects/test-project/locations/northamerica-northeast1/queues/NEW_EPISODE/tasks/task-123",
|
||||
expect.objectContaining({
|
||||
method: "DELETE",
|
||||
headers: { Authorization: "Bearer test-token" },
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -2,6 +2,6 @@ import { getDb } from "~/models/db";
|
||||
|
||||
import { getTestEnv } from "./getTestEnv";
|
||||
|
||||
export function getTestDb(env?: Cloudflare.Env) {
|
||||
return getDb(env ?? getTestEnv());
|
||||
export function getTestDb() {
|
||||
return getDb(getTestEnv());
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { env } from "cloudflare:test";
|
||||
|
||||
/** Should only be used when it doesn't make sense for 'Bindings' or 'Variables' to be set. Otherwise, use getTestEnv(). */
|
||||
export function getTestEnvVariables(): Cloudflare.Env {
|
||||
return getTestEnv();
|
||||
@@ -7,13 +5,14 @@ export function getTestEnvVariables(): Cloudflare.Env {
|
||||
|
||||
export function getTestEnv({
|
||||
ADMIN_SDK_JSON = '{"client_email": "test@test.com", "project_id": "test-26g38"}',
|
||||
LOG_DB_QUERIES = "false",
|
||||
...mockEnv
|
||||
ENABLE_ANIFY = "true",
|
||||
TURSO_AUTH_TOKEN = "123",
|
||||
TURSO_URL = "http://127.0.0.1:3001",
|
||||
}: Partial<Cloudflare.Env> = {}): Cloudflare.Env {
|
||||
return {
|
||||
...env,
|
||||
ADMIN_SDK_JSON,
|
||||
LOG_DB_QUERIES,
|
||||
...mockEnv,
|
||||
ENABLE_ANIFY,
|
||||
TURSO_AUTH_TOKEN,
|
||||
TURSO_URL,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2,7 +2,9 @@ import { tables } from "~/models/schema";
|
||||
|
||||
import { getTestDb } from "./getTestDb";
|
||||
|
||||
export async function resetTestDb(db = getTestDb()) {
|
||||
export async function resetTestDb() {
|
||||
const db = getTestDb();
|
||||
|
||||
for (const table of tables) {
|
||||
await db.delete(table);
|
||||
}
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
import { createMiddleware } from "hono/factory";
|
||||
|
||||
import type { User } from "~/types/user";
|
||||
|
||||
export const userProfileMiddleware = createMiddleware<
|
||||
Cloudflare.Env & {
|
||||
Variables: {
|
||||
user: User;
|
||||
};
|
||||
Bindings: Env;
|
||||
}
|
||||
>(async (c, next) => {
|
||||
const aniListToken = await c.req.header("X-AniList-Token");
|
||||
if (!aniListToken) {
|
||||
return next();
|
||||
}
|
||||
|
||||
const user = await c.env.ANILIST_DO.getByName("GLOBAL").getUser(aniListToken);
|
||||
if (!user) {
|
||||
return c.json({ error: "User not found" }, 401);
|
||||
}
|
||||
|
||||
c.set("user", user);
|
||||
return next();
|
||||
});
|
||||
15
src/mocks/anilist/deleteMediaListEntry.ts
Normal file
15
src/mocks/anilist/deleteMediaListEntry.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { HttpResponse, graphql } from "msw";
|
||||
|
||||
export function deleteAnilistMediaListEntry() {
|
||||
return graphql.mutation(
|
||||
"DeleteMediaListEntry",
|
||||
({ variables: { entryId } }) =>
|
||||
HttpResponse.json({
|
||||
data: {
|
||||
DeleteMediaListEntry: {
|
||||
deleted: entryId > 0,
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
35
src/mocks/anilist/mediaListEntry.ts
Normal file
35
src/mocks/anilist/mediaListEntry.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { HttpResponse, graphql } from "msw";
|
||||
|
||||
export function getAnilistMediaListEntry() {
|
||||
return graphql.query("GetMediaListEntry", ({ variables: { titleId } }) => {
|
||||
if (titleId === 10) {
|
||||
return HttpResponse.json({
|
||||
data: {
|
||||
Media: {
|
||||
mediaListEntry: {
|
||||
id: 123456,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
} else if (titleId === 139518) {
|
||||
return HttpResponse.json({
|
||||
data: {
|
||||
Media: {
|
||||
mediaListEntry: {
|
||||
id: 123457,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return HttpResponse.json({
|
||||
data: {
|
||||
Media: {
|
||||
mediaListEntry: null,
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
16
src/mocks/anilist/nextAiringEpisode.ts
Normal file
16
src/mocks/anilist/nextAiringEpisode.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { HttpResponse, graphql } from "msw";
|
||||
|
||||
export function getAnilistNextAiringEpisode() {
|
||||
return graphql.query(
|
||||
"GetNextEpisodeAiringAt",
|
||||
({ variables: { titleId } }) => {
|
||||
return HttpResponse.json({
|
||||
data: {
|
||||
Media: {
|
||||
nextAiringEpisode: null,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
575
src/mocks/anilist/search.ts
Normal file
575
src/mocks/anilist/search.ts
Normal file
@@ -0,0 +1,575 @@
|
||||
import { HttpResponse, graphql } from "msw";
|
||||
|
||||
export function getAnilistSearchResults() {
|
||||
return graphql.query("Search", ({ variables: { query, page } }) => {
|
||||
console.log(`Intercepting Search query with ${query} and page ${page}`);
|
||||
|
||||
if (!query || query === "a" || query === "aniwatch") {
|
||||
return HttpResponse.json({
|
||||
data: {
|
||||
Page: {
|
||||
media: [],
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return HttpResponse.json({
|
||||
data: {
|
||||
Page: {
|
||||
media: [
|
||||
{
|
||||
id: 151807,
|
||||
title: {
|
||||
userPreferred: "Ore dake Level Up na Ken",
|
||||
english: "Solo Leveling",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx151807-yxY3olrjZH4k.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx151807-yxY3olrjZH4k.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx151807-yxY3olrjZH4k.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 2759,
|
||||
title: {
|
||||
userPreferred: "Evangelion Shin Movie: Jo",
|
||||
english: "Evangelion: 1.0 You Are (Not) Alone",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx2759-z07kq8Pnw5B1.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx2759-z07kq8Pnw5B1.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx2759-z07kq8Pnw5B1.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 139589,
|
||||
title: {
|
||||
userPreferred: "Kotarou wa Hitorigurashi",
|
||||
english: "Kotaro Lives Alone",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx139589-oFz7JwpwRkQV.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx139589-oFz7JwpwRkQV.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx139589-oFz7JwpwRkQV.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 145815,
|
||||
title: {
|
||||
userPreferred:
|
||||
"Noumin Kanren no Skill Bakka Agetetara Naze ka Tsuyoku Natta.",
|
||||
english:
|
||||
"I've Somehow Gotten Stronger When I Improved My Farm-Related Skills",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx145815-XsgcXy7WzgtK.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx145815-XsgcXy7WzgtK.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx145815-XsgcXy7WzgtK.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 176496,
|
||||
title: {
|
||||
userPreferred:
|
||||
"Ore dake Level Up na Ken: Season 2 - Arise from the Shadow",
|
||||
english: "Solo Leveling Season 2 -Arise from the Shadow-",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx176496-r6oXxEqdZL0n.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx176496-r6oXxEqdZL0n.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx176496-r6oXxEqdZL0n.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 1965,
|
||||
title: {
|
||||
userPreferred: "sola",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx1965-lWBpcTni9PS9.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx1965-lWBpcTni9PS9.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx1965-lWBpcTni9PS9.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 118123,
|
||||
title: {
|
||||
userPreferred: "Holo no Graffiti",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx118123-xqn5fYsjKXJU.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx118123-xqn5fYsjKXJU.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx118123-xqn5fYsjKXJU.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 2582,
|
||||
title: {
|
||||
userPreferred: "Soukou Kihei Votoms",
|
||||
english: "Armored Trooper Votoms",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx2582-aB1Vh1jDobQ3.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx2582-aB1Vh1jDobQ3.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx2582-aB1Vh1jDobQ3.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 116384,
|
||||
title: {
|
||||
userPreferred: "Sol Levante",
|
||||
english: "Sol Levante",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx116384-xn0nQAKGFSd7.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx116384-xn0nQAKGFSd7.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx116384-xn0nQAKGFSd7.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 104073,
|
||||
title: {
|
||||
userPreferred: "Sono Toki, Kanojo wa.",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/nx104073-OQ8YBTy7zmKf.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/nx104073-OQ8YBTy7zmKf.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/nx104073-OQ8YBTy7zmKf.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 15313,
|
||||
title: {
|
||||
userPreferred: "Wooser no Sono Higurashi",
|
||||
english: "Wooser's Hand-to-Mouth Life",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/15313.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/15313.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/15313.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 8068,
|
||||
title: {
|
||||
userPreferred: "Kuroshitsuji Picture Drama",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/8068.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/8068.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/8068.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 3174,
|
||||
title: {
|
||||
userPreferred: "sola Specials",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/3174.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/3174.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/3174.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 1443,
|
||||
title: {
|
||||
userPreferred: "SOL BIANCA",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/1443.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/1443.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/1443.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 153431,
|
||||
title: {
|
||||
userPreferred: "Onna no Sono no Hoshi",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx153431-DMBYQxagH3Uu.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx153431-DMBYQxagH3Uu.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx153431-DMBYQxagH3Uu.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 1444,
|
||||
title: {
|
||||
userPreferred: "Sol Bianca: Taiyou no Fune",
|
||||
english: "Sol Bianca: The Legacy",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx1444-7Yn6hmQ2bk9D.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx1444-7Yn6hmQ2bk9D.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx1444-7Yn6hmQ2bk9D.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 4138,
|
||||
title: {
|
||||
userPreferred: "Chiisana Pengin: Lolo no Bouken",
|
||||
english: "The Adventures of Scamper the Penguin",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/4138.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/4138.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/4138.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 164192,
|
||||
title: {
|
||||
userPreferred: "Planetarium",
|
||||
english: "Planetarium",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx164192-KQ8sYXbaAl6i.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx164192-KQ8sYXbaAl6i.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx164192-KQ8sYXbaAl6i.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 5838,
|
||||
title: {
|
||||
userPreferred: "Furudera no Obake-soudou",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/b5838-QTe07RRZylUm.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/b5838-QTe07RRZylUm.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/b5838-QTe07RRZylUm.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 162882,
|
||||
title: {
|
||||
userPreferred: "P.E.T.",
|
||||
english: "P.E.T.",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx162882-OQENM5pXn7QQ.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx162882-OQENM5pXn7QQ.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx162882-OQENM5pXn7QQ.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 102710,
|
||||
title: {
|
||||
userPreferred: "Kairaku no Sono",
|
||||
english: "The Garden of Pleasure",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/102710-dVayaOkzATwa.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/102710-dVayaOkzATwa.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/102710-dVayaOkzATwa.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 162881,
|
||||
title: {
|
||||
userPreferred: "Mosh Race",
|
||||
english: "Mosh Race",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx162881-c7xmNA6DlHFZ.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx162881-c7xmNA6DlHFZ.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx162881-c7xmNA6DlHFZ.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 5935,
|
||||
title: {
|
||||
userPreferred: "Marco Polo no Boken",
|
||||
english: "Marco Polo's Adventures",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/5935.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/5935.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/5935.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 103449,
|
||||
title: {
|
||||
userPreferred: "SOL",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/103449-FxDK8eJuMAKg.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/103449-FxDK8eJuMAKg.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/103449-FxDK8eJuMAKg.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 12993,
|
||||
title: {
|
||||
userPreferred: "Sono Mukou no Mukougawa",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/12993.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/12993.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/12993.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 20459,
|
||||
title: {
|
||||
userPreferred: "Ganbare! Lulu Lolo",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/20459.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/20459.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/20459.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 137760,
|
||||
title: {
|
||||
userPreferred: "Soko ni wa Mata Meikyuu",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/b137760-CleNdfmuKRy7.png",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/b137760-CleNdfmuKRy7.png",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/b137760-CleNdfmuKRy7.png",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 7473,
|
||||
title: {
|
||||
userPreferred: "Rennyo to Sono Haha",
|
||||
english: "Rennyo and His Mother",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/7473.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/7473.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/7473.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 21418,
|
||||
title: {
|
||||
userPreferred: "Ganbare! Lulu Lolo 3rd Season",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/21418-TZYwdItidowx.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/21418-TZYwdItidowx.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/21418-TZYwdItidowx.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 103517,
|
||||
title: {
|
||||
userPreferred: "Toute wa Sono Kotae",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/103517-XgOUryeFaPDW.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/103517-XgOUryeFaPDW.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/103517-XgOUryeFaPDW.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 113572,
|
||||
title: {
|
||||
userPreferred: "Sono Saki no Taniji",
|
||||
english: "Journey to the beyond",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/b113572-hP9x1SkRJXvA.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/b113572-hP9x1SkRJXvA.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/b113572-hP9x1SkRJXvA.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 20864,
|
||||
title: {
|
||||
userPreferred: "Ganbare! Lulu Lolo 2nd Season",
|
||||
english: null,
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/20864.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/20864.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/20864.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 15129,
|
||||
title: {
|
||||
userPreferred: "Tanpen Animation Junpei Fujita",
|
||||
english: "Short Animations of Junpei Fujita",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/15129.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/15129.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/15129.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 106557,
|
||||
title: {
|
||||
userPreferred: "Sono Ie no Namae",
|
||||
english: "A Place to Name",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/nx106557-TPLmwa2EccB9.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/nx106557-TPLmwa2EccB9.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/nx106557-TPLmwa2EccB9.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 118133,
|
||||
title: {
|
||||
userPreferred: "Guzu no Soko",
|
||||
english: "In Inertia",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/b118133-y7RvDFmr30hZ.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/b118133-y7RvDFmr30hZ.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/b118133-y7RvDFmr30hZ.jpg",
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 169686,
|
||||
title: {
|
||||
userPreferred: "Soto ni Denai hi",
|
||||
english: "Indoor Days",
|
||||
},
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx169686-exScHzB5UX2D.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx169686-exScHzB5UX2D.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx169686-exScHzB5UX2D.jpg",
|
||||
},
|
||||
},
|
||||
],
|
||||
pageInfo: {
|
||||
hasNextPage: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
70
src/mocks/anilist/title.ts
Normal file
70
src/mocks/anilist/title.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { HttpResponse, graphql } from "msw";
|
||||
|
||||
export function getAnilistTitle() {
|
||||
return graphql.query(
|
||||
"GetTitle",
|
||||
({ variables: { id }, request: { headers } }) => {
|
||||
console.log(
|
||||
`Intercepting GetTitle query with ID ${id} and Authorization header ${headers.get("authorization")}`,
|
||||
);
|
||||
|
||||
if (id === -1 || id === 50) {
|
||||
return HttpResponse.json({
|
||||
errors: [
|
||||
{
|
||||
message: "Not Found.",
|
||||
status: 404,
|
||||
locations: [
|
||||
{
|
||||
line: 2,
|
||||
column: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
data: {
|
||||
Media: null,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return HttpResponse.json({
|
||||
data: {
|
||||
Media: {
|
||||
id: 135643,
|
||||
idMal: 49210,
|
||||
title: {
|
||||
english: "The Grimm Variations",
|
||||
userPreferred: "The Grimm Variations",
|
||||
},
|
||||
description:
|
||||
'Once upon a time, brothers Jacob and Wilhelm collected fairy tales from across the land and made them into a book. They also had a much younger sister, the innocent and curious Charlotte, who they loved very much. One day, while the brothers were telling Charlotte a fairy tale like usual, they saw that she had a somewhat melancholy look on her face. She asked them, "Do you suppose they really lived happily ever after?"\n<br><br>\nThe pages of Grimms\' Fairy Tales, written by Jacob and Wilhelm, are now presented from the unique perspective of Charlotte, who sees the stories quite differently from her brothers.\n<br><br>\n(Source: Netflix Anime)',
|
||||
episodes: 6,
|
||||
genres: ["Fantasy", "Thriller"],
|
||||
status: "FINISHED",
|
||||
bannerImage:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/banner/135643-cmQZCR3z9dB5.jpg",
|
||||
averageScore: 66,
|
||||
coverImage: {
|
||||
extraLarge:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx135643-2kJt86K9Db9P.jpg",
|
||||
large:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx135643-2kJt86K9Db9P.jpg",
|
||||
medium:
|
||||
"https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx135643-2kJt86K9Db9P.jpg",
|
||||
},
|
||||
countryOfOrigin: "JP",
|
||||
mediaListEntry: headers.has("authorization")
|
||||
? {
|
||||
id: 402665918,
|
||||
progress: 1,
|
||||
status: "CURRENT",
|
||||
}
|
||||
: null,
|
||||
nextAiringEpisode: null,
|
||||
},
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
37
src/mocks/anilist/updateWatchStatus.ts
Normal file
37
src/mocks/anilist/updateWatchStatus.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { HttpResponse, graphql } from "msw";
|
||||
|
||||
export function updateAnilistWatchStatus() {
|
||||
return graphql.mutation(
|
||||
"UpdateWatchStatus",
|
||||
({ variables: { titleId, watchStatus }, request: { headers } }) => {
|
||||
console.log(
|
||||
`Intercepting UpdateWatchStatus mutation with ID ${titleId}, watch status ${watchStatus} and Authorization header ${headers.get("authorization")}`,
|
||||
);
|
||||
|
||||
if (titleId === -1) {
|
||||
return HttpResponse.json({
|
||||
errors: [
|
||||
{
|
||||
message: "validation",
|
||||
status: 400,
|
||||
locations: [
|
||||
{
|
||||
line: 2,
|
||||
column: 2,
|
||||
},
|
||||
],
|
||||
validation: {
|
||||
mediaId: ["The selected media id is invalid."],
|
||||
},
|
||||
},
|
||||
],
|
||||
data: {
|
||||
SaveMediaListEntry: null,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return HttpResponse.json({ data: { id: titleId } });
|
||||
},
|
||||
);
|
||||
}
|
||||
31
src/mocks/aniwatch/episodes.ts
Normal file
31
src/mocks/aniwatch/episodes.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { HttpResponse, http } from "msw";
|
||||
|
||||
export function getAniwatchEpisodes() {
|
||||
return http.get(
|
||||
"https://aniwatch.up.railway.app/api/v2/hianime/anime/:aniListId/episodes",
|
||||
({ params }) => {
|
||||
const aniListId = Number(params["aniListId"]);
|
||||
if (aniListId === 4) {
|
||||
return HttpResponse.json({
|
||||
code: 200,
|
||||
message: "success",
|
||||
episodes: [
|
||||
{
|
||||
id: "aniwatch-1",
|
||||
episode: 1,
|
||||
title: "EP 1",
|
||||
isFiller: false,
|
||||
isDub: false,
|
||||
image: null,
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
return HttpResponse.json(
|
||||
{ code: 500, message: "Server error", episodes: [] },
|
||||
{ status: 500 },
|
||||
);
|
||||
},
|
||||
);
|
||||
}
|
||||
512
src/mocks/aniwatch/search.ts
Normal file
512
src/mocks/aniwatch/search.ts
Normal file
@@ -0,0 +1,512 @@
|
||||
import { HttpResponse, http } from "msw";
|
||||
|
||||
export function getAniwatchSearchResults() {
|
||||
return http.get(
|
||||
"https://aniwatch.up.railway.app/api/v2/hianime/search",
|
||||
({ request }) => {
|
||||
const query = new URL(request.url).searchParams.get("query");
|
||||
|
||||
return HttpResponse.json({
|
||||
animes: [
|
||||
{
|
||||
id: "naruto-shippuden-355",
|
||||
name: "Naruto: Shippuden",
|
||||
jname: "Naruto: Shippuuden",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/9cbcf87f54194742e7686119089478f8.jpg",
|
||||
duration: "23m",
|
||||
type: "TV",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 500,
|
||||
dub: 500,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-shippuden-the-movie-2306",
|
||||
name: "Naruto: Shippuden the Movie",
|
||||
jname: "Naruto: Shippuuden Movie 1",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/071ca93201eccc34a9e088013bc27807.jpg",
|
||||
duration: "94m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-shippuden-the-movie-2-bonds-2346",
|
||||
name: "Naruto: Shippuden the Movie 2 -Bonds-",
|
||||
jname: "Naruto: Shippuuden Movie 2 - Kizuna",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/74a112674ab92212933e41cb532689a5.jpg",
|
||||
duration: "92m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-x-ut-1840",
|
||||
name: "Naruto x UT",
|
||||
jname: "Naruto x UT",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/be66602efedb73c4688e302303b0a422.jpg",
|
||||
duration: "6m",
|
||||
type: "OVA",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-677",
|
||||
name: "Naruto",
|
||||
jname: "Naruto",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/5db400c33f7494bc8ae96f9e634958d0.jpg",
|
||||
duration: "23m",
|
||||
type: "TV",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 220,
|
||||
dub: 220,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-the-movie-2-legend-of-the-stone-of-gelel-4004",
|
||||
name: "Naruto the Movie 2: Legend of the Stone of Gelel",
|
||||
jname:
|
||||
"Naruto Movie 2: Dai Gekitotsu! Maboroshi no Chiteiiseki Dattebayo!",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/111f06edfffba5f46f5cac05db2a6bce.jpg",
|
||||
duration: "97m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "road-of-naruto-18220",
|
||||
name: "Road of Naruto",
|
||||
jname: "Road of Naruto",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/fd414879634ea83ad2c4fc1c33e8ac43.jpg",
|
||||
duration: "9m",
|
||||
type: "ONA",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-shippuuden-movie-5-blood-prison-1642",
|
||||
name: "Naruto: Shippuuden Movie 5 - Blood Prison",
|
||||
jname: "Naruto: Shippuuden Movie 5 - Blood Prison",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/23a436a4ae640fa191a587b5e417bf7d.jpg",
|
||||
duration: "102m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "boruto-naruto-next-generations-8143",
|
||||
name: "Boruto: Naruto Next Generations",
|
||||
jname: "Boruto: Naruto Next Generations",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/32c83e2ad4a43229996356840db3982c.jpg",
|
||||
duration: "23m",
|
||||
type: "TV",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 293,
|
||||
dub: 273,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "boruto-naruto-the-movie-1391",
|
||||
name: "Boruto: Naruto the Movie",
|
||||
jname: "Boruto: Naruto the Movie",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/f0ad5b3ee01703cc817638973b535aa2.jpg",
|
||||
duration: "95m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-shippuuden-movie-6-road-to-ninja-1066",
|
||||
name: "Naruto: Shippuuden Movie 6: Road to Ninja",
|
||||
jname: "Naruto: Shippuuden Movie 6 - Road to Ninja",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/dde4a8a8ddd19648711845448d02d6d8.jpg",
|
||||
duration: "109m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "the-last-naruto-the-movie-882",
|
||||
name: "The Last: Naruto the Movie",
|
||||
jname: "The Last: Naruto the Movie",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/8d42031c8f566e744d84de02d42466bc.jpg",
|
||||
duration: "112m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-shippuuden-movie-3-inheritors-of-will-of-fire-2044",
|
||||
name: "Naruto Shippuuden Movie 3: Inheritors of Will of Fire",
|
||||
jname: "Naruto: Shippuuden Movie 3 - Hi no Ishi wo Tsugu Mono",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/1b9aad793b15265876f479c53ca7bfe1.jpg",
|
||||
duration: "95m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-shippuuden-movie-4-the-lost-tower-1821",
|
||||
name: "Naruto: Shippuuden Movie 4 - The Lost Tower",
|
||||
jname: "Naruto: Shippuuden Movie 4 - The Lost Tower",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/68c5ae4e5b496eb0474920659a9a85e2.jpg",
|
||||
duration: "85m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "boruto-naruto-the-movie-the-day-naruto-became-the-hokage-1805",
|
||||
name: "Boruto: Naruto the Movie - The Day Naruto Became the Hokage",
|
||||
jname: "Boruto: Naruto the Movie - Naruto ga Hokage ni Natta Hi",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/b19c06fae70eab67b1f390ed3cd905d8.jpg",
|
||||
duration: "10m",
|
||||
type: "Special",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-the-movie-3-guardians-of-the-crescent-moon-kingdom-4005",
|
||||
name: "Naruto the Movie 3: Guardians of the Crescent Moon Kingdom",
|
||||
jname:
|
||||
"Naruto Movie 3: Dai Koufun! Mikazuki Jima no Animaru Panikku Dattebayo!",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/73d003618cd260df44e93a5baf9acb56.jpg",
|
||||
duration: "94m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-find-the-crimson-four-leaf-clover-5694",
|
||||
name: "Naruto: Find the Crimson Four-leaf Clover!",
|
||||
jname: "Naruto: Akaki Yotsuba no Clover wo Sagase",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/da3a3d57e29aa0dba87cd6e1596b78e9.jpg",
|
||||
duration: "17m",
|
||||
type: "Special",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-ova5-naruto-the-genie-and-the-three-wishes-3657",
|
||||
name: "Naruto OVA5: Naruto, The Genie, and The Three Wishes!!",
|
||||
jname:
|
||||
"Naruto Soyokazeden Movie: Naruto to Mashin to Mitsu no Onegai Dattebayo!!",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/57935a347fb4328e0132c76afdd85822.jpg",
|
||||
duration: "14m",
|
||||
type: "OVA",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-narutimate-hero-3-tsuini-gekitotsu-jounin-vs-genin-musabetsu-dairansen-taikai-kaisai-4485",
|
||||
name: "Naruto Narutimate Hero 3: Tsuini Gekitotsu! Jounin vs. Genin!! Musabetsu Dairansen Taikai Kaisai!!",
|
||||
jname:
|
||||
"Naruto Narutimate Hero 3: Tsuini Gekitotsu! Jounin vs. Genin!! Musabetsu Dairansen Taikai Kaisai!!",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/939f107dc40ca24056b90c0b215bd475.jpg",
|
||||
duration: "26m",
|
||||
type: "OVA",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-ova7-chunin-exam-on-fire-and-naruto-vs-konohamaru-2928",
|
||||
name: "Naruto OVA7: Chunin Exam on Fire! and Naruto vs. Konohamaru!",
|
||||
jname: "Naruto: Honoo no Chuunin Shiken! Naruto vs. Konohamaru!!",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/44246cdf24c85468599ff2b9496c27cb.jpg",
|
||||
duration: "14m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-the-movie-ninja-clash-in-the-land-of-snow-3162",
|
||||
name: "Naruto Movie 1: Ninja Clash in the Land of Snow",
|
||||
jname:
|
||||
"Naruto Movie 1: Dai Katsugeki!! Yuki Hime Shinobu Houjou Dattebayo!",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/a1ab85f1eb75ec0a986e4c9d5fe04b49.jpg",
|
||||
duration: "82m",
|
||||
type: "Movie",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-ova9-sunny-side-battle-1916",
|
||||
name: "Naruto OVA9: Sunny Side Battle",
|
||||
jname: "Naruto: Shippuuden - Sunny Side Battle",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/94c836b7aff106f515b53f8eb440ccdf.jpg",
|
||||
duration: "11m",
|
||||
type: "Special",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-the-cross-roads-4291",
|
||||
name: "Naruto: The Cross Roads",
|
||||
jname: "Naruto: The Cross Roads",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/99d7c753d9535c0d91858e4dd2a8d939.jpg",
|
||||
duration: "27m",
|
||||
type: "Special",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-ova2-the-lost-story-mission-protect-the-waterfall-village-4538",
|
||||
name: "Naruto OVA2: The Lost Story - Mission: Protect the Waterfall Village",
|
||||
jname: "Naruto: Takigakure no Shitou - Ore ga Eiyuu Dattebayo!",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/ed2ca489d8c438c880056ea507efc93c.jpg",
|
||||
duration: "40m",
|
||||
type: "Special",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-ova3-hidden-leaf-village-grand-sports-festival-4136",
|
||||
name: "Naruto OVA3: Hidden Leaf Village Grand Sports Festival",
|
||||
jname:
|
||||
"Naruto: Dai Katsugeki!! Yuki Hime Shinobu Houjou Dattebayo! - Konoha no Sato no Dai Undoukai",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/b4bb0d2caaa9591fdb3c442738d7f87a.jpg",
|
||||
duration: "11m",
|
||||
type: "Special",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 1,
|
||||
dub: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "naruto-spin-off-rock-lee-his-ninja-pals-2992",
|
||||
name: "NARUTO Spin-Off: Rock Lee & His Ninja Pals",
|
||||
jname: "Naruto SD: Rock Lee no Seishun Full-Power Ninden",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/37f8b16b0f693e433207117abe5daf44.jpg",
|
||||
duration: "24m",
|
||||
type: "TV",
|
||||
rating: null,
|
||||
episodes: {
|
||||
sub: 51,
|
||||
dub: 51,
|
||||
},
|
||||
},
|
||||
],
|
||||
mostPopularAnimes: [
|
||||
{
|
||||
id: "one-piece-100",
|
||||
name: "One Piece",
|
||||
jname: "One Piece",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/bcd84731a3eda4f4a306250769675065.jpg",
|
||||
episodes: {
|
||||
sub: 1116,
|
||||
dub: 1085,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "naruto-shippuden-355",
|
||||
name: "Naruto: Shippuden",
|
||||
jname: "Naruto: Shippuuden",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/9cbcf87f54194742e7686119089478f8.jpg",
|
||||
episodes: {
|
||||
sub: 500,
|
||||
dub: 500,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "jujutsu-kaisen-2nd-season-18413",
|
||||
name: "Jujutsu Kaisen 2nd Season",
|
||||
jname: "Jujutsu Kaisen 2nd Season",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/b51f863b05f30576cf9d85fa9b911bb5.png",
|
||||
episodes: {
|
||||
sub: 23,
|
||||
dub: 23,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "bleach-806",
|
||||
name: "Bleach",
|
||||
jname: "Bleach",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/bd5ae1d387a59c5abcf5e1a6a616728c.jpg",
|
||||
episodes: {
|
||||
sub: 366,
|
||||
dub: 366,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "black-clover-2404",
|
||||
name: "Black Clover",
|
||||
jname: "Black Clover",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/f58b0204c20ae3310f65ae7b8cb9987e.jpg",
|
||||
episodes: {
|
||||
sub: 170,
|
||||
dub: 170,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "demon-slayer-kimetsu-no-yaiba-swordsmith-village-arc-18056",
|
||||
name: "Demon Slayer: Kimetsu no Yaiba Swordsmith Village Arc",
|
||||
jname: "Kimetsu no Yaiba: Katanakaji no Sato-hen",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/db2f3ce7b9cab7fdc160b005bffb899a.png",
|
||||
episodes: {
|
||||
sub: 11,
|
||||
dub: 11,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "boruto-naruto-next-generations-8143",
|
||||
name: "Boruto: Naruto Next Generations",
|
||||
jname: "Boruto: Naruto Next Generations",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/32c83e2ad4a43229996356840db3982c.jpg",
|
||||
episodes: {
|
||||
sub: 293,
|
||||
dub: 273,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "naruto-677",
|
||||
name: "Naruto",
|
||||
jname: "Naruto",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/5db400c33f7494bc8ae96f9e634958d0.jpg",
|
||||
episodes: {
|
||||
sub: 220,
|
||||
dub: 220,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "jujutsu-kaisen-tv-534",
|
||||
name: "Jujutsu Kaisen (TV)",
|
||||
jname: "Jujutsu Kaisen (TV)",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/82402f796b7d84d7071ab1e03ff7747a.jpg",
|
||||
episodes: {
|
||||
sub: 24,
|
||||
dub: 24,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
{
|
||||
id: "spy-x-family-17977",
|
||||
name: "Spy x Family",
|
||||
jname: "Spy x Family",
|
||||
poster:
|
||||
"https://cdn.noitatnemucod.net/thumbnail/300x400/100/88bd17534dc4884f23027035d23d74e5.jpg",
|
||||
episodes: {
|
||||
sub: 12,
|
||||
dub: 12,
|
||||
},
|
||||
type: "TV",
|
||||
},
|
||||
],
|
||||
currentPage: 1,
|
||||
hasNextPage: false,
|
||||
totalPages: 1,
|
||||
searchQuery: "naruto-shippuden-355",
|
||||
searchFilters: {},
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
53
src/mocks/aniwatch/sources.ts
Normal file
53
src/mocks/aniwatch/sources.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import { HttpResponse, http } from "msw";
|
||||
|
||||
export function getAniwatchSources() {
|
||||
return http.get(
|
||||
"https://aniwatch.up.railway.app/api/v2/hianime/episode/sources",
|
||||
({ request }) => {
|
||||
const url = new URL(request.url);
|
||||
const id = url.searchParams.get("id");
|
||||
|
||||
if (id === "unknown") {
|
||||
return HttpResponse.json(
|
||||
{
|
||||
code: 404,
|
||||
message:
|
||||
"The requested resource could not be found but may be available in the future. Subsequent requests by the client are permissible.",
|
||||
},
|
||||
{ status: 404 },
|
||||
);
|
||||
}
|
||||
|
||||
return HttpResponse.json({
|
||||
tracks: [
|
||||
{
|
||||
file: "https://s.megastatics.com/subtitle/4ea42fb35b93b7a2d8e69ca8fe55c0e5/eng-2.vtt",
|
||||
label: "English",
|
||||
kind: "captions",
|
||||
default: true,
|
||||
},
|
||||
{
|
||||
file: "https://s.megastatics.com/thumbnails/be7d997958cdf9b9444d910c2c28645e/thumbnails.vtt",
|
||||
kind: "thumbnails",
|
||||
},
|
||||
],
|
||||
intro: {
|
||||
start: 258,
|
||||
end: 347,
|
||||
},
|
||||
outro: {
|
||||
start: 1335,
|
||||
end: 1424,
|
||||
},
|
||||
sources: [
|
||||
{
|
||||
url: "https://vd2.biananset.net/_v7/26c0c3f5b635f5b9153fca5d43037bb06875d79b3f1528ca69ac83f8e14c90a48cce237316cbf6fa12de243f1dca5118b8dbb767aff155b79ad687a75905004314bee838cdbd8bea083910d6f660f3e29ebb5bb3e48dd9b30816c31737fc8fdf9dd123a7ea937c5594fb9daf540e6a4e6aecef840e23f0fe9cfe20638e3467a2/master.m3u8",
|
||||
type: "hls",
|
||||
},
|
||||
],
|
||||
anilistID: 153406,
|
||||
malID: 52635,
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
||||
24
src/mocks/cloudflare.ts
Normal file
24
src/mocks/cloudflare.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { mock } from "bun:test";
|
||||
|
||||
mock.module("cloudflare:workers", () => ({
|
||||
env: {
|
||||
ADMIN_SDK_JSON: JSON.stringify({
|
||||
type: "service_account",
|
||||
projectId: "test-26g38",
|
||||
privateKeyId: "privateKeyId",
|
||||
privateKey: "privateKey",
|
||||
clientEmail: "test@test.com",
|
||||
clientID: "clientId",
|
||||
authURI: "https://accounts.google.com/o/oauth2/auth",
|
||||
tokenURI: "https://oauth2.googleapis.com/token",
|
||||
authProviderX509CertUrl: "https://www.googleapis.com/oauth2/v1/certs",
|
||||
clientX509CertUrl:
|
||||
"https://www.googleapis.com/robot/v1/metadata/x509/test%40test.com",
|
||||
universeDomain: "aniplay.com",
|
||||
}),
|
||||
ANIWATCH_URL: "https://aniwatch.to",
|
||||
CONSUMET_URL: "https://api.consumet.org",
|
||||
ANILIST_URL: "https://graphql.anilist.co",
|
||||
GOOGLE_AUTH_URL: "https://www.googleapis.com/oauth2/v4/token",
|
||||
},
|
||||
}));
|
||||
35
src/mocks/consumet.ts
Normal file
35
src/mocks/consumet.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import type { IAnimeEpisode, ISource } from "@consumet/extensions";
|
||||
|
||||
import { mock } from "bun:test";
|
||||
|
||||
mock.module("src/consumet", () => ({
|
||||
aniList: {
|
||||
fetchEpisodesListById(
|
||||
id: string,
|
||||
dub?: boolean | undefined,
|
||||
fetchFiller?: boolean | undefined,
|
||||
): Promise<IAnimeEpisode[]> {
|
||||
if (id === "3") {
|
||||
return Promise.resolve([
|
||||
{
|
||||
id: "consumet-1",
|
||||
number: 1,
|
||||
title: "Consumet 1",
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
return Promise.resolve([]);
|
||||
},
|
||||
fetchEpisodeSources(episodeId: string, ...args: any): Promise<ISource> {
|
||||
if (episodeId === "unknown") {
|
||||
return Promise.resolve({ sources: [] });
|
||||
}
|
||||
|
||||
return Promise.resolve({
|
||||
sources: [{ url: "https://consumet.com" }],
|
||||
subtitles: [],
|
||||
});
|
||||
},
|
||||
},
|
||||
}));
|
||||
76
src/mocks/gcloud.ts
Normal file
76
src/mocks/gcloud.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { HttpResponse, http } from "msw";
|
||||
|
||||
import type { FcmMessagePayload } from "~/libs/gcloud/sendFcmMessage";
|
||||
|
||||
export function mockFcmMessageResponse() {
|
||||
return http.post<{}, { message: FcmMessagePayload; validate_only: boolean }>(
|
||||
"https://fcm.googleapis.com/v1/projects/test-26g38/messages:send",
|
||||
async ({ request }) => {
|
||||
const { message } = await request.json();
|
||||
const { name, token } = message;
|
||||
|
||||
if (name === "token_verification") {
|
||||
if (token?.length === 163) {
|
||||
return HttpResponse.json({ name });
|
||||
}
|
||||
|
||||
return HttpResponse.json({
|
||||
error: {
|
||||
code: 400,
|
||||
message:
|
||||
"The registration token is not a valid FCM registration token",
|
||||
status: "INVALID_ARGUMENT",
|
||||
details: [
|
||||
{
|
||||
"@type": "type.googleapis.com/google.firebase.fcm.v1.FcmError",
|
||||
errorCode: "INVALID_ARGUMENT",
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return HttpResponse.json(message);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
export function mockCreateGcloudTask() {
|
||||
return http.post<{}, { task: { name: string } }>(
|
||||
"https://content-cloudtasks.googleapis.com/v2/projects/test-26g38/locations/northamerica-northeast1/queues/new-episode/tasks",
|
||||
async ({ request }) => {
|
||||
const {
|
||||
task: { name },
|
||||
} = await request.json();
|
||||
return HttpResponse.json({ name });
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
export function mockDeleteGcloudTask() {
|
||||
return http.delete<{ taskId: string }>(
|
||||
"https://content-cloudtasks.googleapis.com/v2/projects/test-26g38/locations/northamerica-northeast1/queues/new-episode/tasks/:taskId",
|
||||
async ({ params }) => {
|
||||
const { taskId } = params;
|
||||
|
||||
if (taskId === "123") {
|
||||
return HttpResponse.json({
|
||||
error: {
|
||||
code: 404,
|
||||
message: "Task not found",
|
||||
status: "NOT_FOUND",
|
||||
details: [
|
||||
{
|
||||
"@type": "type.googleapis.com/google.rpc.Status",
|
||||
code: 5,
|
||||
message: "Task not found",
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return HttpResponse.json({ messageId: "123" });
|
||||
},
|
||||
);
|
||||
}
|
||||
41
src/mocks/getGoogleAuthToken.ts
Normal file
41
src/mocks/getGoogleAuthToken.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import type { TokenOptions } from "gtoken";
|
||||
|
||||
import { mock } from "bun:test";
|
||||
|
||||
import type { AdminSdkCredentials } from "~/libs/gcloud/getAdminSdkCredentials";
|
||||
|
||||
const emailRegex =
|
||||
/^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|.(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
|
||||
|
||||
class MockGoogleToken {
|
||||
private email: string | undefined;
|
||||
|
||||
constructor(options: TokenOptions) {
|
||||
this.email = options.email;
|
||||
}
|
||||
|
||||
getToken() {
|
||||
console.log("getToken", this.email);
|
||||
if (!this.email) {
|
||||
return Promise.reject("No email provided");
|
||||
}
|
||||
|
||||
if (!emailRegex.test(this.email)) {
|
||||
return Promise.reject("Invalid email");
|
||||
}
|
||||
|
||||
return Promise.resolve({
|
||||
access_token: "asd",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
mock.module("src/libs/gcloud/getGoogleAuthToken", () => {
|
||||
return {
|
||||
getGoogleAuthToken: (adminSdkJson: AdminSdkCredentials) => {
|
||||
return new MockGoogleToken({
|
||||
email: adminSdkJson.clientEmail,
|
||||
}).getToken();
|
||||
},
|
||||
};
|
||||
});
|
||||
29
src/mocks/handlers.ts
Normal file
29
src/mocks/handlers.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { deleteAnilistMediaListEntry } from "./anilist/deleteMediaListEntry";
|
||||
import { getAnilistMediaListEntry } from "./anilist/mediaListEntry";
|
||||
import { getAnilistNextAiringEpisode } from "./anilist/nextAiringEpisode";
|
||||
import { getAnilistSearchResults } from "./anilist/search";
|
||||
import { getAnilistTitle } from "./anilist/title";
|
||||
import { updateAnilistWatchStatus } from "./anilist/updateWatchStatus";
|
||||
import { getAniwatchEpisodes } from "./aniwatch/episodes";
|
||||
import { getAniwatchSearchResults } from "./aniwatch/search";
|
||||
import { getAniwatchSources } from "./aniwatch/sources";
|
||||
import {
|
||||
mockCreateGcloudTask,
|
||||
mockDeleteGcloudTask,
|
||||
mockFcmMessageResponse,
|
||||
} from "./gcloud";
|
||||
|
||||
// All MSW request handlers registered for the test suite: AniList GraphQL
// mocks, Aniwatch REST mocks, and Google Cloud (FCM / Cloud Tasks) mocks.
export const handlers = [
  deleteAnilistMediaListEntry(),
  getAnilistMediaListEntry(),
  getAnilistNextAiringEpisode(),
  getAnilistSearchResults(),
  getAnilistTitle(),
  updateAnilistWatchStatus(),
  getAniwatchEpisodes(),
  getAniwatchSearchResults(),
  getAniwatchSources(),
  mockCreateGcloudTask(),
  mockDeleteGcloudTask(),
  mockFcmMessageResponse(),
];
|
||||
5
src/mocks/index.ts
Normal file
5
src/mocks/index.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { setupServer } from "msw/node";
|
||||
|
||||
import { handlers } from "./handlers";
|
||||
|
||||
// Node-side MSW server preloaded with every mock handler above.
export const server = setupServer(...handlers);
|
||||
@@ -1,3 +1,4 @@
|
||||
import type { Episode } from "~/types/episode";
|
||||
import type { FetchUrlResponseSchema } from "~/types/episode/fetch-url-response";
|
||||
import type { Title } from "~/types/title";
|
||||
import type { HomeTitle } from "~/types/title/homeTitle";
|
||||
@@ -92,12 +93,13 @@ export const mockEpisodeUrl: FetchUrlResponseSchema = {
|
||||
* Mock data for episodes list
|
||||
* Returns a sample list of 50 episodes for testing
|
||||
*/
|
||||
export const mockEpisodes = () => {
|
||||
export const mockEpisodes: () => Episode[] = () => {
|
||||
const randomId = Math.floor(Math.random() * 1000000);
|
||||
return Array.from({ length: 50 }, (_, i) => ({
|
||||
id: `${randomId}-episode-${i + 1}`,
|
||||
number: i + 1,
|
||||
title: `Episode ${i + 1}`,
|
||||
isFiller: false,
|
||||
updatedAt: 0,
|
||||
}));
|
||||
};
|
||||
@@ -1,9 +1,15 @@
|
||||
// import { createClient } from "@libsql/client";
|
||||
import { env as cloudflareEnv } from "cloudflare:workers";
|
||||
import { drizzle } from "drizzle-orm/d1";
|
||||
|
||||
type Db = ReturnType<typeof drizzle>;
|
||||
// let db: Db | null = null;
|
||||
|
||||
export function getDb(env: Cloudflare.Env = cloudflareEnv): Db {
|
||||
const db = drizzle(env.DB, { logger: env.LOG_DB_QUERIES == "true" });
|
||||
// if (db) {
|
||||
// return db;
|
||||
// }
|
||||
|
||||
const db = drizzle(env.DB, { logger: true });
|
||||
return db;
|
||||
}
|
||||
|
||||
@@ -31,7 +31,7 @@ export const watchStatusTable = sqliteTable(
|
||||
|
||||
export const keyValueTable = sqliteTable("key_value", {
|
||||
key: text("key", {
|
||||
enum: ["schedule_last_checked_at", "anify_killswitch_till"],
|
||||
enum: ["schedule_last_checked_at"],
|
||||
}).primaryKey(),
|
||||
value: text("value").notNull(),
|
||||
});
|
||||
|
||||
@@ -1,99 +0,0 @@
|
||||
import { env } from "cloudflare:test";
|
||||
import { eq } from "drizzle-orm";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
import { getTestDb } from "~/libs/test/getTestDb";
|
||||
import { resetTestDb } from "~/libs/test/resetTestDb";
|
||||
|
||||
import { deviceTokensTable, watchStatusTable } from "./schema";
|
||||
|
||||
vi.mock("cloudflare:workers", () => ({ env: {} }));
|
||||
|
||||
describe("watchStatus model", () => {
|
||||
const db = getTestDb(env);
|
||||
let setWatchStatus: any;
|
||||
let isWatchingTitle: any;
|
||||
|
||||
beforeEach(async () => {
|
||||
await resetTestDb(db);
|
||||
vi.resetModules();
|
||||
|
||||
vi.doMock("./db", () => ({
|
||||
getDb: () => db,
|
||||
}));
|
||||
|
||||
// Seed devices to satisfy foreign key constraints
|
||||
await db.insert(deviceTokensTable).values([
|
||||
{ deviceId: "device-1", token: "token-1" },
|
||||
{ deviceId: "device-2", token: "token-2" },
|
||||
{ deviceId: "device-X", token: "token-X" },
|
||||
]);
|
||||
|
||||
const mod = await import("./watchStatus");
|
||||
setWatchStatus = mod.setWatchStatus;
|
||||
isWatchingTitle = mod.isWatchingTitle;
|
||||
});
|
||||
|
||||
it("should add watch status if CURRENT", async () => {
|
||||
const result = await setWatchStatus("device-1", 100, "CURRENT");
|
||||
expect(result.wasAdded).toBe(true);
|
||||
expect(result.wasDeleted).toBe(false);
|
||||
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(watchStatusTable)
|
||||
.where(eq(watchStatusTable.titleId, 100));
|
||||
expect(rows).toHaveLength(1);
|
||||
expect(rows[0]).toEqual({ deviceId: "device-1", titleId: 100 });
|
||||
});
|
||||
|
||||
it("should add watch status if PLANNING", async () => {
|
||||
const result = await setWatchStatus("device-1", 101, "PLANNING");
|
||||
expect(result.wasAdded).toBe(true);
|
||||
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(watchStatusTable)
|
||||
.where(eq(watchStatusTable.titleId, 101));
|
||||
expect(rows).toHaveLength(1);
|
||||
});
|
||||
|
||||
it("should remove watch status if null", async () => {
|
||||
// Setup
|
||||
await setWatchStatus("device-1", 102, "CURRENT");
|
||||
|
||||
const result = await setWatchStatus("device-1", 102, null);
|
||||
expect(result.wasAdded).toBe(false);
|
||||
expect(result.wasDeleted).toBe(true);
|
||||
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(watchStatusTable)
|
||||
.where(eq(watchStatusTable.titleId, 102));
|
||||
expect(rows).toHaveLength(0);
|
||||
});
|
||||
|
||||
it("should effectively handle multiple devices watching same title", async () => {
|
||||
await setWatchStatus("device-1", 103, "CURRENT");
|
||||
await setWatchStatus("device-2", 103, "CURRENT");
|
||||
|
||||
// Remove device-1
|
||||
const result = await setWatchStatus("device-1", 103, null);
|
||||
expect(result.wasDeleted).toBe(false); // Because device-2 is still watching (count 1)
|
||||
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(watchStatusTable)
|
||||
.where(eq(watchStatusTable.titleId, 103));
|
||||
expect(rows).toHaveLength(1);
|
||||
expect(rows[0].deviceId).toBe("device-2");
|
||||
});
|
||||
|
||||
it("isWatchingTitle checks if any user is watching", async () => {
|
||||
expect(await isWatchingTitle(200)).toBe(false);
|
||||
|
||||
await setWatchStatus("device-X", 200, "CURRENT");
|
||||
|
||||
expect(await isWatchingTitle(200)).toBe(true);
|
||||
});
|
||||
});
|
||||
47
src/resolvers/image.ts
Normal file
47
src/resolvers/image.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import { encode } from "blurhash";
|
||||
import type { UintArrRet } from "jpeg-js";
|
||||
import type { PNGWithMetadata } from "pngjs";
|
||||
|
||||
export async function imageResolver(
|
||||
parent:
|
||||
| string
|
||||
| null
|
||||
| undefined
|
||||
| { extraLarge?: string; large?: string; medium?: string },
|
||||
) {
|
||||
const imageUrl =
|
||||
typeof parent === "string"
|
||||
? parent
|
||||
: (parent?.extraLarge ?? parent?.large ?? parent?.medium);
|
||||
if (!imageUrl) {
|
||||
return { url: imageUrl };
|
||||
}
|
||||
|
||||
return {
|
||||
url: imageUrl,
|
||||
placeholder: await generateImagePlaceholder(imageUrl),
|
||||
};
|
||||
}
|
||||
|
||||
async function generateImagePlaceholder(imageUrl: string) {
|
||||
const imageBuffer = await fetch(imageUrl).then((res) => res.arrayBuffer());
|
||||
let pixels: PNGWithMetadata | UintArrRet;
|
||||
|
||||
if (imageUrl.endsWith(".png")) {
|
||||
const { PNG } = await import("pngjs");
|
||||
pixels = PNG.sync.read(Buffer.from(imageBuffer));
|
||||
} else if (imageUrl.endsWith(".jpg")) {
|
||||
const jpeg = await import("jpeg-js");
|
||||
pixels = jpeg.decode(imageBuffer, { formatAsRGBA: true, useTArray: true });
|
||||
} else {
|
||||
throw new Error(`Unsupported image format: ${imageUrl.split(".").pop()}`);
|
||||
}
|
||||
|
||||
return encode(
|
||||
new Uint8ClampedArray(pixels.data),
|
||||
pixels.width,
|
||||
pixels.height,
|
||||
4,
|
||||
3,
|
||||
);
|
||||
}
|
||||
29
src/resolvers/index.ts
Normal file
29
src/resolvers/index.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { markEpisodeAsWatchedMutation } from "./mutations/markEpisodeAsWatched";
|
||||
import { updateTokenMutation } from "./mutations/updateToken";
|
||||
import { updateWatchStatusMutation } from "./mutations/updateWatchStatus";
|
||||
import { episodeStream } from "./queries/episodeStream";
|
||||
import { healthCheck } from "./queries/healthCheck";
|
||||
import { popularBrowse } from "./queries/popularBrowse";
|
||||
import { popularByCategory } from "./queries/popularByCategory";
|
||||
import { search } from "./queries/search";
|
||||
import { title } from "./queries/title";
|
||||
import { user } from "./queries/user";
|
||||
import { Title } from "./title";
|
||||
|
||||
// Root GraphQL resolver map: read queries, write mutations, and the
// field-level resolvers for the Title type.
export const resolvers = {
  Query: {
    healthCheck,
    title,
    search,
    popularBrowse,
    popularByCategory,
    episodeStream,
    user,
  },
  Mutation: {
    // Mutation field names map onto the imported *Mutation resolver functions.
    updateWatchStatus: updateWatchStatusMutation,
    markEpisodeAsWatched: markEpisodeAsWatchedMutation,
    updateToken: updateTokenMutation,
  },
  Title,
};
|
||||
65
src/resolvers/mutations/markEpisodeAsWatched.ts
Normal file
65
src/resolvers/mutations/markEpisodeAsWatched.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { GraphQLError } from "graphql";
|
||||
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { markEpisodeAsWatched } from "~/services/episodes/markEpisodeAsWatched/anilist";
|
||||
|
||||
interface MarkEpisodeAsWatchedInput {
|
||||
titleId: number;
|
||||
episodeNumber: number;
|
||||
isComplete: boolean;
|
||||
}
|
||||
|
||||
interface MarkEpisodeAsWatchedArgs {
|
||||
input: MarkEpisodeAsWatchedInput;
|
||||
}
|
||||
|
||||
export async function markEpisodeAsWatchedMutation(
|
||||
_parent: unknown,
|
||||
args: MarkEpisodeAsWatchedArgs,
|
||||
context: GraphQLContext,
|
||||
) {
|
||||
const { input } = args;
|
||||
const { aniListToken } = context;
|
||||
|
||||
if (!aniListToken) {
|
||||
throw new GraphQLError(
|
||||
"AniList token is required. Please provide X-AniList-Token header.",
|
||||
{
|
||||
extensions: { code: "UNAUTHORIZED" },
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
const user = await markEpisodeAsWatched(
|
||||
aniListToken,
|
||||
input.titleId,
|
||||
input.episodeNumber,
|
||||
input.isComplete,
|
||||
);
|
||||
|
||||
if (input.isComplete) {
|
||||
if (context.deviceId) {
|
||||
const { updateWatchStatus } = await import("~/services/watch-status");
|
||||
await updateWatchStatus(context.deviceId, input.titleId, "COMPLETED");
|
||||
} else {
|
||||
console.warn(
|
||||
"Device ID not found in context, skipping watch status update",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
throw new GraphQLError("Failed to mark episode as watched", {
|
||||
extensions: { code: "INTERNAL_SERVER_ERROR" },
|
||||
});
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error("Error marking episode as watched:", error);
|
||||
throw new GraphQLError("Failed to mark episode as watched", {
|
||||
extensions: { code: "INTERNAL_SERVER_ERROR" },
|
||||
});
|
||||
}
|
||||
}
|
||||
30
src/resolvers/mutations/updateToken.ts
Normal file
30
src/resolvers/mutations/updateToken.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { getAdminSdkCredentials } from "~/libs/gcloud/getAdminSdkCredentials";
|
||||
import { verifyFcmToken } from "~/libs/gcloud/verifyFcmToken";
|
||||
import { saveToken } from "~/models/token";
|
||||
|
||||
export async function updateTokenMutation(
|
||||
_parent: unknown,
|
||||
args: { token: string },
|
||||
context: GraphQLContext,
|
||||
) {
|
||||
const { deviceId } = context;
|
||||
|
||||
try {
|
||||
const isValidToken = await verifyFcmToken(
|
||||
args.token,
|
||||
getAdminSdkCredentials(),
|
||||
);
|
||||
if (!isValidToken) {
|
||||
return false;
|
||||
}
|
||||
|
||||
await saveToken(deviceId, args.token);
|
||||
} catch (error) {
|
||||
console.error("Failed to save token");
|
||||
console.error(error);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
43
src/resolvers/mutations/updateWatchStatus.ts
Normal file
43
src/resolvers/mutations/updateWatchStatus.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { GraphQLError } from "graphql";
|
||||
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { updateWatchStatus } from "~/services/watch-status";
|
||||
import type { WatchStatus } from "~/types/title/watchStatus";
|
||||
|
||||
interface UpdateWatchStatusInput {
|
||||
titleId: number;
|
||||
watchStatus: WatchStatus | null;
|
||||
}
|
||||
|
||||
interface UpdateWatchStatusArgs {
|
||||
input: UpdateWatchStatusInput;
|
||||
}
|
||||
|
||||
export async function updateWatchStatusMutation(
|
||||
_parent: unknown,
|
||||
args: UpdateWatchStatusArgs,
|
||||
context: GraphQLContext,
|
||||
) {
|
||||
const { input } = args;
|
||||
const { deviceId } = context;
|
||||
|
||||
if (!deviceId) {
|
||||
throw new GraphQLError(
|
||||
"Device ID is required. Please provide X-Device-ID header.",
|
||||
{
|
||||
extensions: { code: "BAD_REQUEST" },
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
await updateWatchStatus(deviceId, input.titleId, input.watchStatus);
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error("Error updating watch status:", error);
|
||||
throw new GraphQLError("Failed to update watch status", {
|
||||
extensions: { code: "INTERNAL_SERVER_ERROR" },
|
||||
});
|
||||
}
|
||||
}
|
||||
15
src/resolvers/queries/episodeStream.ts
Normal file
15
src/resolvers/queries/episodeStream.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { fetchEpisodeUrl } from "~/services/episodes/getEpisodeUrl";
|
||||
|
||||
export async function episodeStream(
|
||||
_parent: unknown,
|
||||
args: { id: string },
|
||||
context: GraphQLContext,
|
||||
) {
|
||||
const episodeUrl = await fetchEpisodeUrl({ id: args.id });
|
||||
if (!episodeUrl || !episodeUrl.success) {
|
||||
throw new Error("Failed to fetch episode URL");
|
||||
}
|
||||
|
||||
return { ...episodeUrl.result, url: episodeUrl.result.source };
|
||||
}
|
||||
9
src/resolvers/queries/healthCheck.ts
Normal file
9
src/resolvers/queries/healthCheck.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import type { GraphQLContext } from "~/context";
|
||||
|
||||
export function healthCheck(
|
||||
_parent: unknown,
|
||||
_args: unknown,
|
||||
_context: GraphQLContext,
|
||||
): boolean {
|
||||
return true;
|
||||
}
|
||||
45
src/resolvers/queries/home.ts
Normal file
45
src/resolvers/queries/home.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { env } from "cloudflare:workers";
|
||||
import { graphql } from "gql.tada";
|
||||
import { GraphQLError } from "graphql";
|
||||
|
||||
import type { GraphQLContext } from "~/graph~/context";
|
||||
import { MediaFragment } from "~/types/title/mediaFragment";
|
||||
|
||||
enum HomeCategory {
|
||||
WATCHING,
|
||||
PLANNING,
|
||||
}
|
||||
|
||||
export async function home(
|
||||
_parent: any,
|
||||
args: { category: HomeCategory; page?: number },
|
||||
context: GraphQLContext,
|
||||
) {
|
||||
const { category, page = 1 } = args;
|
||||
const { user, aniListToken } = context;
|
||||
let statusFilters: string[] = [];
|
||||
switch (category) {
|
||||
case HomeCategory.WATCHING:
|
||||
statusFilters = ["CURRENT"];
|
||||
break;
|
||||
case HomeCategory.PLANNING:
|
||||
statusFilters = ["PLANNING", "PAUSED", "REPEATING"];
|
||||
break;
|
||||
}
|
||||
|
||||
const stub = await env.ANILIST_DO.getByName("GLOBAL");
|
||||
const response = await stub.getTitles(
|
||||
user?.name,
|
||||
page,
|
||||
statusFilters,
|
||||
aniListToken,
|
||||
);
|
||||
|
||||
if (!response) {
|
||||
throw new GraphQLError(`Failed to fetch ${category} titles`, {
|
||||
extensions: { code: "INTERNAL_SERVER_ERROR" },
|
||||
});
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
30
src/resolvers/queries/popularBrowse.ts
Normal file
30
src/resolvers/queries/popularBrowse.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { GraphQLError } from "graphql";
|
||||
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { fetchPopularTitlesFromAnilist } from "~/services/popular/browse/anilist";
|
||||
|
||||
interface PopularBrowseArgs {
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
export async function popularBrowse(
|
||||
_parent: unknown,
|
||||
args: PopularBrowseArgs,
|
||||
_context: GraphQLContext,
|
||||
) {
|
||||
const { limit = 10 } = args;
|
||||
|
||||
const response = await fetchPopularTitlesFromAnilist(limit);
|
||||
|
||||
if (!response) {
|
||||
throw new GraphQLError("Failed to fetch popular titles", {
|
||||
extensions: { code: "INTERNAL_SERVER_ERROR" },
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
trending: response.trending || [],
|
||||
popular: response.popular || [],
|
||||
upcoming: response.upcoming || [],
|
||||
};
|
||||
}
|
||||
32
src/resolvers/queries/popularByCategory.ts
Normal file
32
src/resolvers/queries/popularByCategory.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { GraphQLError } from "graphql";
|
||||
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { fetchPopularTitlesFromAnilist } from "~/services/popular/category/anilist";
|
||||
import type { PopularCategory } from "~/services/popular/category/enum";
|
||||
|
||||
interface PopularByCategoryArgs {
|
||||
category: PopularCategory;
|
||||
page?: number;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
export async function popularByCategory(
|
||||
_parent: unknown,
|
||||
args: PopularByCategoryArgs,
|
||||
_context: GraphQLContext,
|
||||
) {
|
||||
const { category, page = 1, limit = 10 } = args;
|
||||
|
||||
const response = await fetchPopularTitlesFromAnilist(category, page, limit);
|
||||
|
||||
if (!response) {
|
||||
throw new GraphQLError(`Failed to fetch ${category} titles`, {
|
||||
extensions: { code: "INTERNAL_SERVER_ERROR" },
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
results: response.results || [],
|
||||
hasNextPage: response.hasNextPage ?? false,
|
||||
};
|
||||
}
|
||||
29
src/resolvers/queries/search.ts
Normal file
29
src/resolvers/queries/search.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { fetchSearchResultsFromAnilist } from "~/services/search/anilist";
|
||||
|
||||
interface SearchArgs {
|
||||
query: string;
|
||||
page?: number;
|
||||
limit?: number;
|
||||
}
|
||||
|
||||
export async function search(
|
||||
_parent: unknown,
|
||||
args: SearchArgs,
|
||||
_context: GraphQLContext,
|
||||
) {
|
||||
const { query, page = 1, limit = 10 } = args;
|
||||
|
||||
const response = await fetchSearchResultsFromAnilist(query, page, limit);
|
||||
if (!response) {
|
||||
return {
|
||||
results: [],
|
||||
hasNextPage: false,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
results: response.results || [],
|
||||
hasNextPage: response.hasNextPage ?? false,
|
||||
};
|
||||
}
|
||||
32
src/resolvers/queries/title.ts
Normal file
32
src/resolvers/queries/title.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { GraphQLError } from "graphql";
|
||||
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { fetchTitleFromAnilist } from "~/libs/anilist/getTitle";
|
||||
|
||||
interface TitleArgs {
|
||||
id: number;
|
||||
}
|
||||
|
||||
export async function title(
|
||||
_parent: unknown,
|
||||
args: TitleArgs,
|
||||
context: GraphQLContext,
|
||||
) {
|
||||
const { id } = args;
|
||||
const { aniListToken } = context;
|
||||
|
||||
// Fetch title
|
||||
const titleData = await fetchTitleFromAnilist(id, aniListToken);
|
||||
|
||||
if (!titleData) {
|
||||
throw new GraphQLError(`Title with id ${id} not found`, {
|
||||
extensions: { code: "NOT_FOUND" },
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
...titleData,
|
||||
title: titleData.title?.userPreferred ?? titleData.title?.english,
|
||||
numEpisodes: titleData.episodes,
|
||||
};
|
||||
}
|
||||
22
src/resolvers/queries/user.ts
Normal file
22
src/resolvers/queries/user.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { GraphQLError } from "graphql";
|
||||
|
||||
import type { GraphQLContext } from "~/context";
|
||||
import { getUser } from "~/services/auth/anilist/getUser";
|
||||
|
||||
export async function user(_parent: any, _args: {}, context: GraphQLContext) {
|
||||
const { aniListToken } = context;
|
||||
if (!aniListToken) {
|
||||
throw new GraphQLError("Unauthorized", {
|
||||
extensions: { code: "UNAUTHORIZED" },
|
||||
});
|
||||
}
|
||||
|
||||
const response = await getUser(aniListToken);
|
||||
if (!response) {
|
||||
throw new GraphQLError(`Failed to fetch user`, {
|
||||
extensions: { code: "INTERNAL_SERVER_ERROR" },
|
||||
});
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
12
src/resolvers/title.ts
Normal file
12
src/resolvers/title.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { fetchEpisodes } from "~/services/episodes/getByAniListId";
|
||||
import type { Title as TitleType } from "~/types/title";
|
||||
|
||||
import { imageResolver } from "./image";
|
||||
|
||||
export const Title = {
|
||||
episodes: async (parent: { id: number }) => await fetchEpisodes(parent.id),
|
||||
coverImage: async (parent: TitleType) =>
|
||||
await imageResolver(parent.coverImage),
|
||||
bannerImage: async (parent: TitleType) =>
|
||||
await imageResolver(parent.bannerImage),
|
||||
};
|
||||
234
src/schema.ts
Normal file
234
src/schema.ts
Normal file
@@ -0,0 +1,234 @@
|
||||
// GraphQL SDL for the public API. Keep in sync with the resolver map in
// src/resolvers/index.ts and the per-resolver argument interfaces.
export const typeDefs = /* GraphQL */ `
  # ====================
  # Scalars & Enums
  # ====================

  scalar JSONObject

  enum WatchStatus {
    COMPLETED
    CURRENT
    PLANNING
    DROPPED
    PAUSED
    REPEATING
  }

  enum MediaStatus {
    FINISHED
    RELEASING
    NOT_YET_RELEASED
    CANCELLED
    HIATUS
  }

  enum HomeCategory {
    WATCHING
    PLANNING
  }

  enum PopularCategory {
    TRENDING
    POPULAR
    UPCOMING
  }

  # ====================
  # Title Types
  # ====================

  type Image {
    url: String
    placeholder: String
  }

  type NextAiringEpisode {
    episode: Int!
    airingAt: Int!
    timeUntilAiring: Int!
  }

  type MediaListEntry {
    status: WatchStatus
    progress: Int
    id: Int!
    updatedAt: Int
  }

  type Episode {
    id: String!
    number: Float!
    title: String
    img: String
    description: String
    rating: Int
    updatedAt: Int!
  }

  type Title {
    id: Int!
    idMal: Int
    title: String!
    description: String
    numEpisodes: Int
    genres: [String]
    status: MediaStatus
    bannerImage: Image
    averageScore: Int
    coverImage: Image
    countryOfOrigin: String!
    mediaListEntry: MediaListEntry
    nextAiringEpisode: NextAiringEpisode
    episodes: [Episode!]!
  }

  # ====================
  # Home/Preview Title Type (simplified)
  # ====================

  type HomeTitle {
    id: Int!
    idMal: Int
    title: String!
    description: String
    numEpisodes: Int
    genres: [String]
    status: MediaStatus
    bannerImage: String
    averageScore: Int
    coverImage: Image
    countryOfOrigin: String!
  }

  # ====================
  # Response Types
  # ====================

  type SearchResult {
    results: [HomeTitle!]!
    hasNextPage: Boolean!
  }

  type PopularBrowse {
    trending: [HomeTitle!]!
    popular: [HomeTitle!]!
    upcoming: [HomeTitle!]
  }

  type PopularResult {
    results: [HomeTitle!]!
    hasNextPage: Boolean!
  }

  type EpisodeStream {
    url: String!
    subtitles: [LangUrl!]!
    audio: [LangUrl!]!
    intro: [Int!]
    outro: [Int!]
    headers: JSONObject
  }

  type LangUrl {
    lang: String!
    url: String!
  }

  type User {
    name: String!
    avatar: Image!
    statistics: UserAnimeStatistics!
  }

  type UserAnimeStatistics {
    count: Int!
    meanScore: Float!
    minutesWatched: Int!
    episodesWatched: Int!
  }

  # ====================
  # Input Types
  # ====================

  input UpdateWatchStatusInput {
    titleId: Int!
    watchStatus: WatchStatus
  }

  input MarkEpisodeAsWatchedInput {
    titleId: Int!
    episodeNumber: Float!
    # The markEpisodeAsWatched resolver reads input.isComplete; this field
    # was missing from the schema. Defaulted so clients that omit it keep
    # working.
    isComplete: Boolean! = false
  }

  # ====================
  # Queries
  # ====================

  type Query {
    """
    Simple health check to verify API is running
    """
    healthCheck: Boolean!

    """
    Fetch a title by AniList ID
    """
    title(id: Int!): Title!

    """
    Fetch an episode stream by ID
    """
    episodeStream(id: String!): EpisodeStream!

    """
    Search for titles
    """
    search(query: String!, page: Int = 1, limit: Int = 10): SearchResult!

    """
    Fetch paginated home titles for a specific category
    """
    home(category: HomeCategory!, page: Int = 1, limit: Int = 10): [HomeTitle!]!

    """
    Browse popular titles across all categories (trending, popular, upcoming)
    """
    popularBrowse(limit: Int = 10): PopularBrowse!

    """
    Fetch paginated popular titles for a specific category
    """
    popularByCategory(
      category: PopularCategory!
      page: Int = 1
      limit: Int = 10
    ): PopularResult!

    """
    Fetch the authenticated user's profile
    """
    user: User!
  }

  # ====================
  # Mutations
  # ====================

  type Mutation {
    """
    Update watch status for a title. Device ID must be provided via X-Device-ID header.
    """
    updateWatchStatus(input: UpdateWatchStatusInput!): Boolean!

    """
    Mark an episode as watched. Device ID must be provided via X-Device-ID header.
    """
    markEpisodeAsWatched(input: MarkEpisodeAsWatchedInput!): Boolean!

    """
    Update the user's FCM token. Device ID must be provided via X-Device-ID header.
    """
    updateToken(token: String!): Boolean!
  }
`;
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user