adding monkeytype
Some checks failed
Mark Stale PRs / stale (push) Has been cancelled

This commit is contained in:
Benjamin Falch
2026-04-23 13:53:44 +02:00
parent e214a2fd35
commit 2bc741fb78
1930 changed files with 7590652 additions and 0 deletions

View File

@@ -0,0 +1,27 @@
#!/bin/bash
# PostToolUse hook: format + lint an edited file, then run its matching spec file.
# Reads the tool-call payload from stdin; never fails the hook (all steps are best-effort).
payload=$(cat)
target=$(echo "$payload" | jq -r '.tool_input.file_path // empty')
[[ -z "$target" ]] && exit 0
# Only run on ts/tsx/js/jsx files
case "$target" in
  *.ts | *.tsx | *.js | *.jsx) ;;
  *) exit 0 ;;
esac
npx oxfmt "$target" >&2 || true
npx oxlint --type-aware --type-check "$target" >&2 || true
# Run matching test file if it exists
# Map frontend/src/ts/<path>/<file>.ts(x) -> frontend/__tests__/<path>/<file>.spec.ts(x)
if [[ "$target" == frontend/src/ts/* ]]; then
  relative="${target#frontend/src/ts/}"
  stem="${relative%.*}"
  suffix="${relative##*.}"
  spec="frontend/__tests__/${stem}.spec.${suffix}"
  if [[ -f "$spec" ]]; then
    npx vitest run "$spec" >&2 || true
  fi
fi

15
.claude/settings.json Normal file
View File

@@ -0,0 +1,15 @@
{
"hooks": {
"PostToolUse": [
{
"matcher": "Edit|Write",
"hooks": [
{
"type": "command",
"command": "bash .claude/hooks/format-and-lint.sh"
}
]
}
]
}
}

30
.claude/skills/commit.md Normal file
View File

@@ -0,0 +1,30 @@
# Commit Changes
Trigger: user asks to commit, or uses /commit
## Steps
1. Determine what to commit:
- Run `git diff --cached --name-only` to check for staged changes.
- If there are staged changes, commit only those. Tell the user: "Committing staged changes."
- If no staged changes, run `git diff --name-only` to check for unstaged changes.
- If there are unstaged changes, stage all of them (`git add` each file by name) and commit. Tell the user: "Staging and committing all changes."
- If no changes at all, tell the user "Nothing to commit." and stop.
2. Run `git diff --cached` to see the full diff of what will be committed.
3. Write a conventional commit message:
- Format: `<type>: <description>`
- Types: `feat`, `fix`, `refactor`, `style`, `chore`, `docs`, `test`, `perf`
- Description: lowercase, imperative mood, no period, concise
- Pick the type that best fits the change. Use `feat` for new features, `fix` for bug fixes, `refactor` for code restructuring, `style` for visual/CSS-only changes, `chore` for maintenance/tooling.
- Add a body (separated by blank line) only if the description alone is insufficient to understand the change.
4. Show the user the proposed commit message and ask for confirmation before committing.
5. On confirmation, create the commit. Use a HEREDOC for the message:
```
git commit -m "$(cat <<'EOF'
<type>: <description>
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
EOF
)"
```
6. Run `git status` after to verify success.

21
.claude/skills/review.md Normal file
View File

@@ -0,0 +1,21 @@
# Review Changed Files
Trigger: user asks to review changes, review code, or uses /review
## Steps
1. Determine what to review, in priority order:
- If the user has selected specific lines/code, review that selection. Tell the user: "Reviewing selection."
- Otherwise, run `git diff --cached --name-only`. If there are staged changes, review only staged changes (`git diff --cached`). Tell the user: "Reviewing staged changes."
- Otherwise, run `git diff --name-only`. If there are unstaged changes, review only unstaged changes (`git diff`). Tell the user: "Reviewing unstaged changes."
- Otherwise, review the last commit (`git diff HEAD~1`). Tell the user: "Reviewing last commit."
2. Read the full file for each changed file (not just the diff) to understand context.
3. Review for:
- **Bugs**: null/undefined access, race conditions, off-by-one errors, missing error handling at system boundaries
- **Dead code**: unused imports, variables, functions, or parameters introduced or left behind by the changes
- **Redundancy**: code that duplicates existing logic or can be simplified
- **Consistency**: does the change follow patterns established in surrounding code and project conventions (see CLAUDE.md)
- **Tailwind**: non-canonical classes, inline styles that should be Tailwind, missing responsive variants if siblings have them
- **Solid-specific**: broken reactivity, missing cleanup, doing things not the "Solid way"
- **Improvements**: any other changes that would make the code more robust, readable, maintainable, better
4. Output a concise list of findings. Highlight which issues should be absolutely fixed before merging/committing. If nothing found, say "No issues found."

23
.dockerignore Normal file
View File

@@ -0,0 +1,23 @@
node_modules
frontend/node_modules
backend/node_modules
# Firebase
.firebase/
.firebaserc
serviceAccountKey*.json
frontend/src/ts/constants/firebase-config.ts
frontend/src/ts/constants/firebase-config-live.ts
#frontend
frontend/.env
#cloudflare
.cloudflareKey.txt
.cloudflareKey_copy.txt
#backend
backend/src/credentials/*.json
backend/.env
backend/dist

5
.editorconfig Normal file
View File

@@ -0,0 +1,5 @@
root = true
[*.{html,js,css,scss,json,yml,yaml}]
indent_size = 2
indent_style = space

12
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1,12 @@
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: monkeytype
open_collective: # Replace with a single Open Collective username
ko_fi: monkeytype
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ["https://www.monkeytype.store/"]

137
.github/ISSUE_TEMPLATE/bug_report.yaml vendored Normal file
View File

@@ -0,0 +1,137 @@
name: Bug report
description: Create a report to help us improve
labels: [bug]
body:
- type: markdown
attributes:
value: |
# Welcome
```
Thanks for taking the time to fill out this bug! If you need real-time help, join us on Discord: discord.gg/monkeytype
```
- type: checkboxes
attributes:
label: Did you clear cache before opening an issue?
description: Sometimes your browser has old files cached and the bug you are experiencing might be already fixed, or is just a side effect of a new update. If you don't know how to do that, this website should help https://www.pcmag.com/how-to/how-to-clear-your-cache-on-any-browser.
options:
- label: I have cleared my cache
required: true
- type: checkboxes
attributes:
label: Is there an existing issue for this?
description: Please [search](https://github.com/monkeytypegame/monkeytype/issues?q=is%3Aissue) to see if an issue already exists for the bug you encountered.
options:
- label: I have [searched](https://github.com/monkeytypegame/monkeytype/issues?q=is%3Aissue) the existing open and closed issues
required: true
- type: markdown
attributes:
value: |
# Basic debugging
```
Below fields are very important to quickly track down the issue, so please take the time to carefully check when the issue happens and when it does not.
```
- type: dropdown
attributes:
label: Does the issue happen when logged in?
options: ["Yes", "No", "N/A"]
validations:
required: true
- type: dropdown
attributes:
label: Does the issue happen when logged out?
options: ["Yes", "No"]
validations:
required: true
- type: dropdown
attributes:
label: Does the issue happen in incognito mode when logged in?
options: ["Yes", "No", "N/A"]
validations:
required: true
- type: dropdown
attributes:
label: Does the issue happen in incognito mode when logged out?
options: ["Yes", "No"]
validations:
required: true
- type: textarea
attributes:
label: Account name
description: Your Monkeytype account name.
placeholder: |
Miodec
validations:
required: false
- type: textarea
attributes:
label: Account config
description: If your issue only happens when logged in, please provide your config. To export your config, go to the Settings page, scroll all the way down to `import/export settings` and click `export`.
placeholder: |
Miodec
{"theme":"cyberspace","showKeyTips":false,"showLiveWpm":false,"showTimerProgress":false, ... "smoothCaret":true}
validations:
required: false
- type: markdown
attributes:
value: |
# Issue details
```
Please provide a detailed description of what's happening, and most importantly - solid steps to reproduce the issue. This will help us find it quicker.
```
- type: textarea
attributes:
label: Current Behavior
description: A concise description of what you're experiencing.
validations:
required: false
- type: textarea
attributes:
label: Expected Behavior
description: A concise description of what you expected to happen.
validations:
required: false
- type: textarea
attributes:
label: Steps To Reproduce
description: Steps to reproduce the behavior.
placeholder: |
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
validations:
required: false
- type: textarea
attributes:
label: Environment
description: |
examples:
- **OS**: Windows 10
- **Browser**: Google Chrome
- **Browser Version**: 94.0.4606.71 (Official Build) (64-bit)
value: |
- OS:
- Browser:
- Browser Version:
validations:
required: false
- type: textarea
attributes:
label: Anything else?
description: |
Links? References? Anything that will give us more context about the issue you are encountering!
Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
validations:
required: false

5
.github/ISSUE_TEMPLATE/config.yml vendored Normal file
View File

@@ -0,0 +1,5 @@
blank_issues_enabled: false
contact_links:
- name: Feature Request / Idea
url: https://github.com/monkeytypegame/monkeytype/discussions
about: Please do not create issues for feature requests. Instead, use GitHub Discussions.

28
.github/copilot-instructions.md vendored Normal file
View File

@@ -0,0 +1,28 @@
# Monkeytype AI Coding Instructions
Make the responses extremely concise. Sacrifice grammar for the sake of concision.
## Architecture
**Monorepo**: pnpm + Turborepo with frontend (Vite + SolidJS), backend (Express + MongoDB + Redis), and shared packages.
## Commands
All commands support `-fe`, `-be`, `-pkg` suffixes for targeted execution:
```bash
pnpm run lint-fe # Frontend linting
pnpm run test-be # Backend + integration tests
pnpm run build-pkg # Packages only
pnpm run dev # All workspaces with hot reload
```
## SolidJS Migration
Frontend is partially migrated - new components use SolidJS (`.tsx`), legacy code remains vanilla JS.
## Debug Tips
- Type/lint errors: Run `pnpm run lint` (OXLint is source of truth, not tsc)
## Key Files
- `turbo.json`: Task deps and caching
- `frontend/src/ts/config-metadata.ts`: Config validation rules
- `packages/contracts/src/index.ts`: API contract structure
- `packages/funbox/src/list.ts`: All funbox definitions
- `backend/src/api/routes/index.ts`: ts-rest setup

7
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,7 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/"
versioning-strategy: increase
schedule:
interval: "weekly"

29
.github/labeler.yml vendored Normal file
View File

@@ -0,0 +1,29 @@
api:
- any: ["frontend/src/ts/ape/**/*", "backend/src/api/**/*"]
assets:
- any: ["frontend/static/**/*"]
all: ["!frontend/static/**/*.html"]
backend:
- any: ["backend/**/*"]
docs:
- any: ["**/*.md"]
frontend:
- any: ["frontend/**/*"]
packages:
- any: ["packages/**/*"]
local dev:
- any:
[
"**/turbo.json",
"**/tsconfig.json",
"**/knip.json",
"**/.prettierrc",
"**/.oxlintrc.json",
"**/.eslintrc.cjs",
]

55
.github/pull_request_template.md vendored Normal file
View File

@@ -0,0 +1,55 @@
### Description
<!--
Please describe the change(s) made in your PR:
- explain the problem being solved
- for bug fixes without an open issue, include steps to reproduce the issue
- summarize the approach taken
Use your own words. Do not rely on AI-generated descriptions.
They do not demonstrate your understanding of the problem or the solution.
Writing the description yourself helps you verify the scope of your work and
helps us better understand your intent, reasoning and level of insight.
-->
### Checks
- [ ] Adding quotes?
- Make sure to follow the [quotes documentation](https://github.com/monkeytypegame/monkeytype/blob/master/docs/QUOTES.md)
- [ ] Make sure to include translations for the quotes in the description (or another comment) so we can verify their content.
- [ ] Adding a language?
- Make sure to follow the [languages documentation](https://github.com/monkeytypegame/monkeytype/blob/master/docs/LANGUAGES.md)
- [ ] Add language to `packages/schemas/src/languages.ts`
- [ ] Add language to exactly one group in `frontend/src/ts/constants/languages.ts`
- [ ] Add language json file to `frontend/static/languages`
- [ ] Adding a theme?
- Make sure to follow the [themes documentation](https://github.com/monkeytypegame/monkeytype/blob/master/docs/THEMES.md)
- [ ] Add theme to `packages/schemas/src/themes.ts`
- [ ] Add theme to `frontend/src/ts/constants/themes.ts`
- [ ] (optional) Add theme css file to `frontend/static/themes`
- [ ] Add some screenshots of the theme, especially with different test settings (colorful, flip colors) to your pull request
- [ ] Adding a layout?
- [ ] Make sure to follow the [layouts documentation](https://github.com/monkeytypegame/monkeytype/blob/master/docs/LAYOUTS.md)
- [ ] Add layout to `packages/schemas/src/layouts.ts`
- [ ] Add layout json file to `frontend/static/layouts`
- [ ] Adding a font?
- Make sure to follow the [fonts documentation](https://github.com/monkeytypegame/monkeytype/blob/master/docs/FONTS.md)
- [ ] Add font file to `frontend/static/webfonts`
- [ ] Add font to `packages/schemas/src/fonts.ts`
- [ ] Add font to `frontend/src/ts/constants/fonts.ts`
- [ ] Check if any open issues are related to this PR; if so, be sure to tag them below.
- [ ] Make sure the PR title follows the Conventional Commits standard. (https://www.conventionalcommits.org for more info)
- [ ] Make sure to include your GitHub username prefixed with @ inside parentheses at the end of the PR title.
<!-- label(optional scope): pull request title (@your_github_username) -->
<!-- I know I know they seem boring but please do them, they help us and you will find out it also helps you. -->
Closes #
<!-- The issue(s) your PR resolves if any (delete if that is not the case) -->
<!-- Please reference any issues and/or PRs related to your pull request -->
<!-- Pro tip: you can mention an issue, PR, or discussion on GitHub by referencing its hash number, e.g.: [#1234](https://github.com/monkeytypegame/monkeytype/pull/1234) -->
<!-- Pro tip: you can press . (dot or period) in the code tab of any GitHub repo to get access to GitHub's VS Code web editor. Enjoy! :) -->

61
.github/workflows/check-formatting.yml vendored Normal file
View File

@@ -0,0 +1,61 @@
name: Check formatting
env:
PNPM_VERSION: "10.28.1"
NODE_VERSION: "24.11.0"
on:
pull_request:
branches: [master]
types: [opened, reopened, synchronize, ready_for_review]
permissions:
contents: read
concurrency:
group: group-format-check-${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
jobs:
check:
if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'force-ci') || contains(github.event.pull_request.labels.*.name, 'force-full-ci')
runs-on: ubuntu-latest
steps:
- name: Full checkout
uses: actions/checkout@v4
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: ${{ env.PNPM_VERSION }}
- name: Install formatter
run: pnpm install -D -w oxfmt
- name: Get changed files
id: get-changed-files
uses: actions/github-script@v7
with:
script: |
const changedFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.payload.pull_request.number,
}
);
return changedFiles.filter(file=> file.status !== "removed").map(file => file.filename).join(' ');
- name: Check formatting (changed files)
run: |
CHANGED_FILES=$(echo ${{ steps.get-changed-files.outputs.result }})
if [ -n "$CHANGED_FILES" ]; then
pnpm oxfmt $CHANGED_FILES --check --no-error-on-unmatched-pattern
fi

23
.github/workflows/check-todo.yml vendored Normal file
View File

@@ -0,0 +1,23 @@
name: PR Todo Checker
on:
pull_request_review_comment:
types: [edited, deleted]
pull_request:
types: [opened, synchronize, reopened]
jobs:
find_todos:
runs-on: ubuntu-latest
permissions:
pull-requests: write # to comment on PRs
contents: read
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Check for Todos
uses: phntmxyz/pr_todo_checker@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -0,0 +1,39 @@
name: Comment on PR for CI Failure
permissions:
pull-requests: write
on:
workflow_run:
workflows: [Monkey CI]
types: [completed]
jobs:
on-failure:
runs-on: ubuntu-latest
if: ${{ github.event.workflow_run.conclusion == 'failure' }}
steps:
- name: Download workflow artifact
uses: actions/download-artifact@v4
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
- name: Read the pr_num file
id: pr_num_reader
uses: juliangruber/read-file-action@v1
with:
path: ./pr_num/pr_num.txt
- name: Create comment
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ steps.pr_num_reader.outputs.content }}
body: |
Continuous integration check(s) failed. Please review the [failing check's logs](${{ github.event.workflow_run.html_url }}) and make the necessary changes.
- name: Apply label changes
uses: PauMAVA/add-remove-label-action@v1.0.3
with:
issue_number: ${{ steps.pr_num_reader.outputs.content }}
add: "waiting for update"
remove: "waiting for review"

View File

@@ -0,0 +1,40 @@
name: Claude Code Review
on:
pull_request:
types: [opened, synchronize, ready_for_review, reopened]
# Optional: Only run on specific file changes
# paths:
# - "src/**/*.ts"
# - "src/**/*.tsx"
# - "src/**/*.js"
# - "src/**/*.jsx"
jobs:
claude-review:
if: >-
contains(fromJson('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.pull_request.author_association)
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: read
issues: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code Review
id: claude-review
uses: anthropics/claude-code-action@v1
with:
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
plugin_marketplaces: "https://github.com/anthropics/claude-code.git"
plugins: "code-review@claude-code-plugins"
prompt: "/code-review:code-review ${{ github.repository }}/pull/${{ github.event.pull_request.number }}"
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://code.claude.com/docs/en/cli-reference for available options

51
.github/workflows/claude.yml vendored Normal file
View File

@@ -0,0 +1,51 @@
name: Claude Code
on:
issue_comment:
types: [created]
pull_request_review_comment:
types: [created]
issues:
types: [opened, assigned]
pull_request_review:
types: [submitted]
jobs:
claude:
if: |
(
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude') && contains(fromJson('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.comment.author_association)) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude') && contains(fromJson('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.comment.author_association)) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude') && contains(fromJson('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.review.author_association)) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')) && contains(fromJson('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.issue.author_association))
)
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: read
issues: read
id-token: write
actions: read # Required for Claude to read CI results on PRs
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
uses: anthropics/claude-code-action@v1
with:
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
# This is an optional setting that allows Claude to read CI results on PRs
additional_permissions: |
actions: read
# Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
# prompt: 'Update the pull request description to include a summary of changes.'
# Optional: Add claude_args to customize behavior and configuration
# See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
# or https://code.claude.com/docs/en/cli-reference for available options
# claude_args: '--allowed-tools Bash(gh pr:*)'

70
.github/workflows/fix-formatting.yml vendored Normal file
View File

@@ -0,0 +1,70 @@
name: Fix formatting
env:
PNPM_VERSION: "10.28.1"
NODE_VERSION: "24.11.0"
permissions:
contents: write
pull-requests: write
on:
pull_request_target:
types: [labeled]
jobs:
format:
runs-on: ubuntu-latest
if: github.event.label.name == 'format'
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
repository: ${{ github.event.pull_request.head.repo.full_name}}
ref: ${{ github.event.pull_request.head.sha }}
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: ${{ env.PNPM_VERSION }}
- name: Install formatter
run: pnpm install -D -w oxfmt --ignore-scripts
- name: Get changed files
id: get-changed-files
uses: actions/github-script@v7
with:
script: |
const changedFiles = await github.paginate(
github.rest.pulls.listFiles,
{
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.payload.pull_request.number,
}
);
return changedFiles.filter(file=> file.status !== "removed").map(file => file.filename).join(' ');
- name: Fix formatting
env:
CHANGED_FILES: ${{ steps.get-changed-files.outputs.result }}
run: |
if [ -n "$CHANGED_FILES" ]; then
echo "$CHANGED_FILES" | tr ' ' '\n' | xargs pnpm oxfmt --no-error-on-unmatched-pattern
fi
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "fix formatting"
- name: Remove label
uses: actions-ecosystem/action-remove-labels@v1
with:
labels: format

14
.github/workflows/labeler.yml vendored Normal file
View File

@@ -0,0 +1,14 @@
name: "Pull Request Labeler"
on:
- pull_request_target
jobs:
triage:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v4
with:
repo-token: "${{ secrets.API_TOKEN }}"

346
.github/workflows/monkey-ci.yml vendored Normal file
View File

@@ -0,0 +1,346 @@
name: Monkey CI
env:
PNPM_VERSION: "10.28.1"
NODE_VERSION: "24.11.0"
RECAPTCHA_SITE_KEY: "6Lc-V8McAAAAAJ7s6LGNe7MBZnRiwbsbiWts87aj"
permissions:
contents: read
on:
pull_request:
branches: [master]
types: [opened, reopened, synchronize, ready_for_review]
push:
branches: [master]
concurrency:
group: group-${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
jobs:
pre-ci:
if: github.event.pull_request.draft == false || contains(github.event.pull_request.labels.*.name, 'force-ci') || contains(github.event.pull_request.labels.*.name, 'force-full-ci')
name: pre-ci
runs-on: ubuntu-latest
outputs:
should-build-be: ${{ steps.export-changes.outputs.should-build-be }}
should-build-fe: ${{ steps.export-changes.outputs.should-build-fe }}
should-build-pkg: ${{ steps.export-changes.outputs.should-build-pkg }}
assets-json: ${{ steps.export-changes.outputs.assets-json }}
steps:
- name: Full checkout
uses: actions/checkout@v4
# paths filter doesn't need checkout on pr
if: github.event_name != 'pull_request'
- name: Detect changes
uses: dorny/paths-filter@v4
id: filter
with:
filters: |
json:
- 'frontend/static/**/*'
be-src:
- 'backend/**/*.{ts,js,json,lua,css,html}'
- 'backend/package.json'
fe-src:
- 'frontend/**/*.{ts,scss,html}'
- 'frontend/package.json'
pkg-src:
- 'packages/**/*'
anti-cheat:
- 'backend/**/anticheat/**'
- name: Check Anti-cheat
if: steps.filter.outputs.anti-cheat == 'true' && !contains(github.event.pull_request.labels.*.name, 'force-ci') && !contains(github.event.pull_request.labels.*.name, 'force-full-ci')
run: exit 1
- name: Export changes
id: export-changes
run: |
echo "should-build-pkg=${{ steps.filter.outputs.pkg-src }}" >> $GITHUB_OUTPUT
echo "should-build-be=${{ steps.filter.outputs.be-src }}" >> $GITHUB_OUTPUT
echo "should-build-fe=${{ steps.filter.outputs.fe-src }}" >> $GITHUB_OUTPUT
echo "assets-json=${{ steps.filter.outputs.json }}" >> $GITHUB_OUTPUT
prime-cache:
name: prime-cache
runs-on: ubuntu-latest
needs: [pre-ci]
if: needs.pre-ci.outputs.should-build-be == 'true' || needs.pre-ci.outputs.should-build-fe == 'true' || needs.pre-ci.outputs.should-build-pkg == 'true' || needs.pre-ci.outputs.assets-json == 'true' || contains(github.event.pull_request.labels.*.name, 'force-full-ci')
steps:
- name: Checkout pnpm-lock
uses: actions/checkout@v4
with:
sparse-checkout: |
pnpm-lock.yaml
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: ${{ env.PNPM_VERSION }}
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Cache node modules
id: cache-pnpm
uses: actions/cache@v4
env:
cache-name: node-modules
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-${{ env.NODE_VERSION }}-build-${{ env.cache-name }}-${{ hashFiles('pnpm-lock.yaml') }}
lookup-only: true
- if: ${{ steps.cache-pnpm.outputs.cache-hit != 'true' }}
name: Full checkout
uses: actions/checkout@v4
- if: ${{ steps.cache-pnpm.outputs.cache-hit != 'true' }}
name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- if: ${{ steps.cache-pnpm.outputs.cache-hit != 'true' }}
name: Install dependencies
run: pnpm install
ci-be:
name: ci-be
needs: [pre-ci, prime-cache]
runs-on: ubuntu-latest
if: needs.pre-ci.outputs.should-build-be == 'true' || needs.pre-ci.outputs.should-build-pkg == 'true' || contains(github.event.pull_request.labels.*.name, 'force-full-ci')
steps:
- uses: actions/checkout@v4
with:
sparse-checkout: |
backend
packages
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: ${{ env.PNPM_VERSION }}
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Cache node modules
id: cache-pnpm
uses: actions/cache@v4
env:
cache-name: node-modules
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-${{ env.NODE_VERSION }}-build-${{ env.cache-name }}-${{ hashFiles('pnpm-lock.yaml') }}
- name: Install dependencies
run: pnpm install
- name: Check lint
run: npm run lint-fast-be && npm run lint-be
- name: Build
run: npm run build-be
- name: Test
run: npm run test-be
ci-fe:
name: ci-fe
needs: [pre-ci, prime-cache]
runs-on: ubuntu-latest
if: needs.pre-ci.outputs.should-build-fe == 'true' || needs.pre-ci.outputs.should-build-pkg == 'true' || contains(github.event.pull_request.labels.*.name, 'force-full-ci')
steps:
- uses: actions/checkout@v4
with:
sparse-checkout: |
frontend
packages
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- name: Create stub firebase config
working-directory: ./frontend/src/ts/constants
run: mv ./firebase-config-example.ts ./firebase-config.ts && cp ./firebase-config.ts ./firebase-config-live.ts
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: ${{ env.PNPM_VERSION }}
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Cache node modules
id: cache-pnpm
uses: actions/cache@v4
env:
cache-name: node-modules
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-${{ env.NODE_VERSION }}-build-${{ env.cache-name }}-${{ hashFiles('pnpm-lock.yaml') }}
- name: Install dependencies
run: pnpm install
- name: Check lint
run: npm run lint-fast-fe && npm run lint-fe
- name: Build
run: npm run build-fe
- name: Test
run: npm run test-fe
ci-assets:
name: ci-assets
needs: [pre-ci, prime-cache]
runs-on: ubuntu-latest
if: needs.pre-ci.outputs.assets-json == 'true' || contains(github.event.pull_request.labels.*.name, 'force-full-ci')
steps:
- uses: actions/checkout@v4
with:
sparse-checkout: |
frontend
packages
- uses: dorny/paths-filter@v4
id: filter
with:
filters: |
languages:
- 'frontend/static/languages/**'
quotes:
- 'frontend/static/quotes/**'
others:
- 'frontend/static/layouts/**'
- 'frontend/static/themes/**'
- 'frontend/static/webfonts/**'
- 'frontend/static/challenges/**'
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: ${{ env.PNPM_VERSION }}
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Cache node modules
id: cache-pnpm
uses: actions/cache@v4
env:
cache-name: node-modules
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-${{ env.NODE_VERSION }}-build-${{ env.cache-name }}-${{ hashFiles('pnpm-lock.yaml') }}
- name: Install dependencies
run: pnpm install
- name: Lint JSON
run: npm run lint-json-assets
- name: Validate language assets
if: steps.filter.outputs.languages == 'true'
run: npm run check-assets-languages
- name: Validate quote assets
if: steps.filter.outputs.quotes == 'true'
run: npm run check-assets-quotes
- name: Validate other assets
if: steps.filter.outputs.others == 'true'
run: npm run check-assets-others
ci-pkg:
name: ci-pkg
needs: [pre-ci, prime-cache]
runs-on: ubuntu-latest
if: needs.pre-ci.outputs.should-build-pkg == 'true' || contains(github.event.pull_request.labels.*.name, 'force-full-ci')
steps:
- uses: actions/checkout@v4
with:
sparse-checkout: |
packages
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: ${{ env.PNPM_VERSION }}
- name: Get pnpm store directory
shell: bash
run: |
echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Cache node modules
id: cache-pnpm
uses: actions/cache@v4
env:
cache-name: node-modules
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-${{ env.NODE_VERSION }}-build-${{ env.cache-name }}-${{ hashFiles('pnpm-lock.yaml') }}
- name: Install dependencies
run: pnpm install
- name: Check lint
run: npm run lint-fast-pkg && npm run lint-pkg
- name: Build
run: npm run build-pkg
- name: Test
run: npm run test-pkg
on-failure:
name: on-failure
runs-on: ubuntu-latest
needs: [ci-be, ci-fe, ci-assets, ci-pkg]
if: ${{ always() && contains(needs.*.result, 'failure') && github.ref != 'refs/heads/master' }}
steps:
- name: Save the PR number in an artifact
shell: bash
env:
PR_NUM: ${{ github.event.number }}
run: echo $PR_NUM > pr_num.txt
- name: Upload the PR number
uses: actions/upload-artifact@v4
with:
name: pr_num
path: ./pr_num.txt

View File

@@ -0,0 +1,87 @@
name: Publish Docker image
permissions:
contents: read
on:
release:
types: [published]
workflow_dispatch:
jobs:
push_to_registry:
env:
BE_REPO: monkeytype/monkeytype-backend
FE_REPO: monkeytype/monkeytype-frontend
PLATFORMS: linux/amd64,linux/arm64
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
steps:
- name: Check out the repo
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
- name: Log in to Docker Hub
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
- name: Backend extract metadata (tags, labels)
id: bemeta
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
with:
images: ${{ env.BE_REPO }}
tags: |
type=semver,pattern={{version}}
- name: Backend build and push Docker image
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
with:
context: .
platforms: ${{ env.PLATFORMS }}
file: ./docker/backend/Dockerfile
push: true
tags: ${{ env.BE_REPO }}:latest,${{ steps.bemeta.outputs.tags }}
labels: ${{ steps.bemeta.outputs.labels }}
build-args: |
server_version: ${{ github.event.release.tag_name }}
- name: Backend publish description
uses: peter-evans/dockerhub-description@e98e4d1628a5f3be2be7c231e50981aee98723ae
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
repository: ${{ env.BE_REPO }}
short-description: Official backend server for monkeytype.com
readme-filepath: ./docs/SELF_HOSTING.md
- name: Frontend extract metadata (tags, labels)
id: femeta
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
with:
images: ${{ env.FE_REPO }}
tags: |
type=semver,pattern={{version}}
- name: Frontend build and push Docker image
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
with:
context: .
platforms: ${{ env.PLATFORMS }}
file: ./docker/frontend/Dockerfile
push: true
tags: ${{ env.FE_REPO }}:latest,${{ steps.femeta.outputs.tags }}
labels: ${{ steps.femeta.outputs.labels }}
- name: Frontend publish description
uses: peter-evans/dockerhub-description@e98e4d1628a5f3be2be7c231e50981aee98723ae
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
repository: ${{ env.FE_REPO }}
short-description: Official frontend server for monkeytype.com
readme-filepath: ./docs/SELF_HOSTING.md

73
.github/workflows/semantic-pr-title.yml vendored Normal file
View File

@@ -0,0 +1,73 @@
name: "Semantic PR Title"
on:
pull_request_target:
types:
- opened
- edited
- synchronize
- reopened
permissions:
pull-requests: write
jobs:
main:
name: check
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.user.login != 'dependabot[bot]' }}
steps:
- name: Lint and verify PR title
uses: amannn/action-semantic-pull-request@v5
id: lint_pr_title
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
types: |
build
chore
ci
docs
feat
impr
fix
perf
refactor
revert
style
test
requireScope: false
subjectPattern: ^.+ \(@[^ ,]+(, @[^ ,]+)*\)$
subjectPatternError: |
Title "{title}"
didn't match the configured pattern. Please ensure that the title
contains your name so that you can be credited in our changelog.
- uses: marocchino/sticky-pull-request-comment@v2
# When the previous steps fails, the workflow would stop. By adding this
# condition you can continue the execution with the populated error message.
if: always() && (steps.lint_pr_title.outputs.error_message != null)
with:
header: pr-title-lint-error
message: |
Hey there and thank you for opening this pull request! 👋🏼
We require pull request titles to follow the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/) and also include the author name at the end inside parenthesis. It looks like your proposed title needs to be adjusted.
Details:
```
${{ steps.lint_pr_title.outputs.error_message }}
```
A correct version would look something like:
feat: add new feature (@github_username)
impr(quotes): add english quotes (@username)
fix(leaderboard): show user rank correctly (@user1, @user2, @user3)
# Delete a previous comment when the issue has been resolved
- if: ${{ steps.lint_pr_title.outputs.error_message == null }}
uses: marocchino/sticky-pull-request-comment@v2
with:
header: pr-title-lint-error
delete: true

16
.github/workflows/stale-pr.yml vendored Normal file
View File

@@ -0,0 +1,16 @@
name: "Mark Stale PRs"
on:
schedule:
- cron: "30 20 * * *"
permissions:
pull-requests: write
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v8
with:
stale-pr-message: "This PR is stale. Please trigger a re-run of the PR check action."
days-before-stale: 7

100
.github/workflows/update-labels.yml vendored Normal file
View File

@@ -0,0 +1,100 @@
name: Check labels to update
permissions:
actions: read
pull-requests: read
on:
pull_request_target:
types:
[
review_requested,
ready_for_review,
review_request_removed,
converted_to_draft,
synchronize,
edited,
]
pull_request_review:
types: [submitted, edited, dismissed]
pull_request_review_comment:
types: [created, edited]
issue_comment:
types: [created, edited]
jobs:
update-labels:
runs-on: ubuntu-latest
env:
PR_NUM: ${{ github.event.pull_request.number || github.event.issue.number }}
steps:
- name: Set up variables
run: |
echo "REVIEW=0" >> $GITHUB_ENV
echo "UPDATE=0" >> $GITHUB_ENV
- name: Add 'waiting for review' label
# when a review is requested or if the PR is converted from a draft
if: |
github.event_name == 'pull_request_target' &&
contains(fromJSON('["review_requested", "ready_for_review"]'), github.event.action)
run: echo "REVIEW=1" >> $GITHUB_ENV
- name: Remove 'waiting for review' label
# when a review request is removed or if the PR is converted to a draft
# or when the PR is reviewed by the owner, a member or a collaborator
if: |
(
github.event_name == 'pull_request_target' &&
contains(fromJSON('["review_request_removed", "converted_to_draft"]'), github.event.action)
) ||
(
github.event_name == 'pull_request_review' &&
contains(fromJSON('["submitted", "edited"]'), github.event.action) &&
contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.review.author_association)
)
run: echo "REVIEW=-1" >> $GITHUB_ENV
- name: Add 'waiting for update' label
# when a review by one of {owner, member, collaborator} requests changes
if: |
github.event_name == 'pull_request_review' &&
github.event.review.state == 'changes_requested' &&
contains(fromJSON('["OWNER", "MEMBER", "COLLABORATOR"]'), github.event.review.author_association)
run: echo "UPDATE=1" >> $GITHUB_ENV
- name: Remove 'waiting for update' label from PR/issue
# when PR is committed to or if the PR is edited or if a review is requested or dismissed
# or when a comment is added by the author to the review or to the main PR thread
if: |
(
github.event_name == 'pull_request_target' &&
contains(fromJSON('["synchronize", "edited", "review_requested"]'), github.event.action)
) ||
(
github.event_name == 'pull_request_review' &&
github.event.action == 'dismissed'
) ||
(
github.event_name == 'pull_request_review_comment' &&
contains(fromJSON('["created", "edited"]'), github.event.action) &&
github.event.comment.user.id == github.event.pull_request.user.id
) ||
(
github.event_name == 'issue_comment' &&
contains(fromJSON('["created", "edited"]'), github.event.action) &&
github.event.comment.user.id == github.event.issue.user.id
)
run: echo "UPDATE=-1" >> $GITHUB_ENV
- name: Save result in a JSON file
env:
LABELS_JSON: ${{ format('{{"waiting_for_review"{0} "{1}", "waiting_for_update"{0} "{2}", "pr_num"{0} "{3}"}}', ':', env.REVIEW, env.UPDATE, env.PR_NUM) }}
run: echo $LABELS_JSON > write-labels.json
- name: Upload the JSON file
uses: actions/upload-artifact@v4
with:
name: labels
path: ./write-labels.json

52
.github/workflows/write-labels.yml vendored Normal file
View File

@@ -0,0 +1,52 @@
name: Write label on PR/issue
permissions:
pull-requests: write
issues: write
on:
workflow_run:
workflows: [Check labels to update]
types: [completed]
jobs:
write-labels:
if: ${{ github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- name: Download workflow artifact
uses: actions/download-artifact@v4
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ github.event.workflow_run.id }}
- name: Read json file
id: json_reader
uses: juliangruber/read-file-action@v1
with:
path: ./labels/write-labels.json
- name: Add `waiting for review` label
if: fromJSON(steps.json_reader.outputs.content).waiting_for_review == 1
run: echo "ADD_LABELS=${ADD_LABELS}waiting for review," >> $GITHUB_ENV
- name: Remove `waiting for review` label
if: fromJSON(steps.json_reader.outputs.content).waiting_for_review == -1
run: echo "REMOVE_LABELS=${REMOVE_LABELS}waiting for review," >> $GITHUB_ENV
- name: Add `waiting for update` label
if: fromJSON(steps.json_reader.outputs.content).waiting_for_update == 1
run: echo "ADD_LABELS=${ADD_LABELS}waiting for update," >> $GITHUB_ENV
- name: Remove `waiting for update` label
if: fromJSON(steps.json_reader.outputs.content).waiting_for_update == -1
run: echo "REMOVE_LABELS=${REMOVE_LABELS}waiting for update," >> $GITHUB_ENV
- name: Apply label changes
if: env.ADD_LABELS || env.REMOVE_LABELS
uses: PauMAVA/add-remove-label-action@v1.0.3
with:
issue_number: ${{ fromJSON(steps.json_reader.outputs.content).pr_num }}
add: ${{ env.ADD_LABELS }}
remove: ${{ env.REMOVE_LABELS }}

135
.gitignore vendored Normal file
View File

@@ -0,0 +1,135 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
firebase-debug.log*
# Firebase cache
.firebase/
# Firebase config
# Uncomment this if you'd like others to create their own Firebase project.
# For a team working on the same Firebase project(s), it is recommended to leave
# it commented so all members can deploy to the same project(s) in .firebaserc.
# .firebaserc
# Runtime data
pids
*.pid
*.seed
*.pid.lock
#Mac files
.DS_Store
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
node_modules_bak/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env*
#vs code
.vscode/*
.vscode/.*
*.code-workspace
!monkeytype.code-workspace
.idea
#firebase
.firebaserc
.firebaserc_copy
serviceAccountKey*.json
!docker/serviceAccountKey-example.json
frontend/src/ts/constants/firebase-config.ts
frontend/src/ts/constants/firebase-config-live.ts
#generated files
dist/
frontend/public/
backend/globalConfig.json
backend/server.version
backend/src/server.version
vite-build/
#cloudflare y
.cloudflareKey.txt
.cloudflareKey_copy.txt
frontend/static/adtest.html
backend/lastId.txt
backend/log_success.txt
backend/credentials/*.json
backend/.env
static/adtest.html
backend/migrationStats.txt
# Madge
dep-graph.png
# TypeScript
build/
backend/workers
*.tsbuildinfo
#scripts
updateContributors.js
removeDupes.js
content-validation.js
ads.txt
updateContributors.mjs
copyAnticheatToDev.sh
# ignore generated fonts
frontend/src/webfonts-generated
frontend/static/webfonts-preview
.turbo
frontend/.env.sentry-build-plugin
.claude/worktrees
1024MiB

7
.husky/commit-msg Executable file
View File

@@ -0,0 +1,7 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Run commitlint only when committing directly to the master branch of the
# upstream monkeytype repository; commits on forks and feature branches are
# not linted.
# The command substitutions are quoted: unquoted, an empty result (e.g. a
# detached HEAD, or no "origin" remote) would make `[` fail with a syntax
# error instead of simply evaluating to false.
if [ "$(git branch --no-color | sed -e '/^[^*]/d' -e 's/* \(.*\)/\1/')" = "master" ] && [ "$(git remote get-url origin)" = "https://github.com/monkeytypegame/monkeytype" ]; then
  # $1 is the path to the commit-message file, passed by git to this hook.
  npx --no -- commitlint --edit "${1}"
fi

14
.husky/post-checkout Executable file
View File

@@ -0,0 +1,14 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Load nvm
# After every checkout: activate the Node version pinned for this repo
# (via `nvm install`, which reads .nvmrc) and refresh dependencies.
export NVM_DIR="$HOME/.nvm"
if [ ! -s "$NVM_DIR/nvm.sh" ]; then
# nvm not available, quietly exit
# (exit 0 so the checkout itself is never blocked by a missing nvm)
exit 0
fi
. "$NVM_DIR/nvm.sh"
nvm install
pnpm i

4
.husky/pre-commit Executable file
View File

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
# Delegate to the repo's "pre-commit" npm script (lint/format staged files).
# A non-zero exit status from the script aborts the commit.
npm run pre-commit

17
.husky/pre-push Executable file
View File

@@ -0,0 +1,17 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

# Load nvm so the Node version pinned in .nvmrc can be activated below.
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"

# Run the expensive full check only when pushing from the master branch of
# the upstream monkeytype repository; pushes from forks/feature branches
# are not gated.
# The command substitutions are quoted: unquoted, an empty result (e.g. a
# detached HEAD, or no "origin" remote) would make `[` fail with a syntax
# error instead of simply evaluating to false.
if [ "$(git branch --no-color | sed -e '/^[^*]/d' -e 's/* \(.*\)/\1/')" = "master" ] && [ "$(git remote get-url origin)" = "https://github.com/monkeytypegame/monkeytype" ]; then
  nvm install
  echo "Running a full check before pushing to master..."
  pnpm run full-check
  if [ $? -ne 0 ]; then
    echo "Full check failed, aborting push."
    # Non-zero exit makes git abort the push.
    exit 1
  fi
fi

3
.npmrc Normal file
View File

@@ -0,0 +1,3 @@
engine-strict=true
save-exact=true
save-prefix=''

1
.nvmrc Normal file
View File

@@ -0,0 +1 @@
24.11.0

37
.oxfmtrc-editor.json Normal file
View File

@@ -0,0 +1,37 @@
{
"$schema": "https://raw.githubusercontent.com/oxc-project/oxc/refs/heads/main/npm/oxfmt/configuration_schema.json",
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"htmlWhitespaceSensitivity": "ignore",
"endOfLine": "lf",
"trailingComma": "all",
"ignorePatterns": [
"pnpm-lock.yaml",
"node_modules",
".turbo",
"dist",
"build",
"logs",
"coverage",
"*.md"
],
"overrides": [
{
"files": ["**/*.tsx"],
"options": {
"experimentalSortImports": {
"groups": [
"type-import",
["value-builtin", "value-external"],
"type-internal",
"value-internal",
["type-parent", "type-sibling", "type-index"],
["value-parent", "value-sibling", "value-index"],
"unknown"
]
}
}
}
]
}

42
.oxfmtrc.json Normal file
View File

@@ -0,0 +1,42 @@
{
"$schema": "https://raw.githubusercontent.com/oxc-project/oxc/refs/heads/main/npm/oxfmt/configuration_schema.json",
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"htmlWhitespaceSensitivity": "ignore",
"endOfLine": "lf",
"trailingComma": "all",
"ignorePatterns": [
"pnpm-lock.yaml",
"node_modules",
".turbo",
"dist",
"build",
"logs",
"coverage",
"*.md"
],
"overrides": [
{
"files": ["**/*.tsx"],
"options": {
"experimentalTailwindcss": {
"stylesheet": "./frontend/src/styles/tailwind.css",
"attributes": ["cn"],
"functions": ["cn"]
},
"experimentalSortImports": {
"groups": [
"type-import",
["value-builtin", "value-external"],
"type-internal",
"value-internal",
["type-parent", "type-sibling", "type-index"],
["value-parent", "value-sibling", "value-index"],
"unknown"
]
}
}
}
]
}

7
.oxlintrc.json Normal file
View File

@@ -0,0 +1,7 @@
{
"ignorePatterns": ["node_modules", "dist", ".turbo"],
"extends": [
"./packages/oxlint-config/index.jsonc"
// "@monkeytype/oxlint-config"
]
}

8
.prettierrc.json Normal file
View File

@@ -0,0 +1,8 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"htmlWhitespaceSensitivity": "ignore",
"trailingComma": "all",
"endOfLine": "lf"
}

6
CLAUDE.md Normal file
View File

@@ -0,0 +1,6 @@
Be extremely concise. Sacrifice grammar for concision.
Frontend is partially migrated from vanilla JS to SolidJS — new components use `.tsx`, legacy code remains vanilla.
Single test file: `pnpm vitest run path/to/test.ts`
For styling, use Tailwind CSS, class property, `cn` utility. Do not use classlist. Only colors available are those defined in Tailwind config.
In legacy code, use `i` tags with FontAwesome classes. In new code, use `Fa` component.
In plan mode, before writing up a plan, ask clarifying questions if needed. At the end of plan mode, give me a list of unresolved questions to answer, if any. Make them concise.

674
LICENSE Normal file
View File

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

72
README.md Normal file
View File

@@ -0,0 +1,72 @@
[![](https://github.com/monkeytypegame/monkeytype/blob/master/frontend/static/images/githubbanner2.png?raw=true)](https://monkeytype.com/)
<br />
[![ChartJs](https://img.shields.io/badge/Chart.js-FF6384?style=for-the-badge&logo=chartdotjs&logoColor=white)](https://www.chartjs.org/)
[![Eslint](https://img.shields.io/badge/eslint-4B32C3?style=for-the-badge&logo=eslint&logoColor=white)](https://eslint.org/)
[![Express](https://img.shields.io/badge/-Express-373737?style=for-the-badge&logo=Express&logoColor=white)](https://expressjs.com/)
[![Firebase](https://img.shields.io/badge/firebase-DD2C00?style=for-the-badge&logo=firebase&logoColor=black)](https://firebase.google.com/)
[![Fontawesome](https://img.shields.io/badge/fontawesome-538DD7?style=for-the-badge&logo=fontawesome&logoColor=white)](https://fontawesome.com/)
[![HTML5](https://img.shields.io/badge/html5-E34F26?style=for-the-badge&logo=html5&logoColor=white)](https://developer.mozilla.org/en-US/docs/Web/HTML)
[![MongoDB](https://img.shields.io/badge/-MongoDB-47A248?style=for-the-badge&logo=mongodb&logoColor=white)](https://www.mongodb.com/)
[![OXLint](https://img.shields.io/badge/oxlint-2b3c5a?style=for-the-badge&logo=oxc&logoColor=white)](https://oxc.rs/docs/guide/usage/linter.html)
[![PNPM](https://img.shields.io/badge/pnpm-F69220?style=for-the-badge&logo=pnpm&logoColor=white)](https://pnpm.io/)
[![Redis](https://img.shields.io/badge/Redis-FF4438?style=for-the-badge&logo=redis&logoColor=white)](https://redis.io/)
[![SASS](https://img.shields.io/badge/SASS-CC6699?style=for-the-badge&logo=SASS&logoColor=white)](https://sass-lang.com/)
[![Solid](https://img.shields.io/badge/solid-2C4F7C?style=for-the-badge&logo=solid&logoColor=white)](https://www.solidjs.com/)
[![Tailwind](https://img.shields.io/badge/tailwind-06B6D4?style=for-the-badge&logo=tailwindcss&logoColor=white)](https://tailwindcss.com/)
[![TsRest](https://img.shields.io/badge/-TSREST-9333ea?style=for-the-badge&logoColor=white&logo=data:image/svg%2bxml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiIHN0YW5kYWxvbmU9Im5vIj8+CjwhLS0gQ3JlYXRlZCB3aXRoIElua3NjYXBlIChodHRwOi8vd3d3Lmlua3NjYXBlLm9yZy8pIC0tPgoKPHN2ZwogICB3aWR0aD0iMjAuMzA2Nzc4bW0iCiAgIGhlaWdodD0iMTIuMDgzMjMzbW0iCiAgIHZpZXdCb3g9IjAgMCAyMC4zMDY3NzggMTIuMDgzMjMzIgogICB2ZXJzaW9uPSIxLjEiCiAgIGlkPSJzdmcxIgogICB4bWxuczppbmtzY2FwZT0iaHR0cDovL3d3dy5pbmtzY2FwZS5vcmcvbmFtZXNwYWNlcy9pbmtzY2FwZSIKICAgeG1sbnM6c29kaXBvZGk9Imh0dHA6Ly9zb2RpcG9kaS5zb3VyY2Vmb3JnZS5uZXQvRFREL3NvZGlwb2RpLTAuZHRkIgogICB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHhtbG5zOnN2Zz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgogIDxzb2RpcG9kaTpuYW1lZHZpZXcKICAgICBpZD0ibmFtZWR2aWV3MSIKICAgICBwYWdlY29sb3I9IiM1MDUwNTAiCiAgICAgYm9yZGVyY29sb3I9IiNmZmZmZmYiCiAgICAgYm9yZGVyb3BhY2l0eT0iMSIKICAgICBpbmtzY2FwZTpzaG93cGFnZXNoYWRvdz0iMCIKICAgICBpbmtzY2FwZTpwYWdlb3BhY2l0eT0iMCIKICAgICBpbmtzY2FwZTpwYWdlY2hlY2tlcmJvYXJkPSIxIgogICAgIGlua3NjYXBlOmRlc2tjb2xvcj0iI2QxZDFkMSIKICAgICBpbmtzY2FwZTpkb2N1bWVudC11bml0cz0ibW0iIC8+CiAgPGRlZnMKICAgICBpZD0iZGVmczEiIC8+CiAgPGcKICAgICBpbmtzY2FwZTpsYWJlbD0iTGF5ZXIgMSIKICAgICBpbmtzY2FwZTpncm91cG1vZGU9ImxheWVyIgogICAgIGlkPSJsYXllcjEiCiAgICAgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoLTMuODE5ODA1NCwtMi4yMTQ3MTkzKSI+CiAgICA8cGF0aAogICAgICAgZD0ibSAxNS40NTgwMzUsOC45NzMzOTUzIDguNjMzMjUsMC4wNDQ4NyAwLjAwOSwtMS42NjgxOTggLTguNjMzMjIsLTAuMDQ0ODUgeiBtIDAuMDI2MywtNS4wNTYxMDggOC42MzMyNSwwLjA0NDg1IDAuMDA5LC0xLjcwMjU2OCAtOC42MzMyNSwtMC4wNDQ4NSB6IG0gLTAuMDQ0OCw4LjYzMzI0NzcgOC42MzMyMywwLjA0NDg1IC0wLjAwOSwxLjcwMjU2NyAtOC42MzMyNSwtMC4wNDQ4NSB6IgogICAgICAgZmlsbD0iI2ZmZmZmZiIKICAgICAgIGlkPSJwYXRoMSIKICAgICAgIHN0eWxlPSJzdHJva2Utd2lkdGg6MC4yNjQ1ODMiIC8+CiAgICA8cGF0aAogICAgICAgZD0ibSAxMS4xMTE3MjUsMTAuMjg2NjI4IGMgMS42NTEsLTAuNjE5MTI0NyAyLjU5Njg4LC0xLjk2MDU2MjcgMi41OTY4OCwtMy44MDA3Mzk3IDAsLTIuNjQ4NDc5IC0xLjkyNjE2LC00LjI0Nzg4NSAtNS4wNzMzNzk2LC00LjI0Nzg4NSBoIC00LjgxNTQyIHYgMS43MDI1OTQgaCA0Ljc0NjYzIGMgMi4wODA5
Mzk2LDAgMy4xNjQ0MDk2LDAuOTI4Njg3IDMuMTY0NDA5NiwyLjU0NTI5MSAwLDEuNTk5NDA2IC0xLjA4MzQ3LDIuNTQ1MjkyIC0zLjE2NDQwOTYsMi41NDUyOTIgaCAtNC43NDY2MyB2IDUuMjQ1MzYzNyBoIDEuOTYwNTYgdiAtMy41NzcxNjYgaCAyLjg1NDg2IGMgMC4yMDYzNywwIDAuNDI5OTUsMCAwLjYxOTEyLC0wLjAxNzIgbCAyLjUyODA5OTYsMy41OTQzNjQgaCAyLjEzMjU0IHoiCiAgICAgICBmaWxsPSIjZmZmZmZmIgogICAgICAgaWQ9InBhdGgyIgogICAgICAgc3R5bGU9InN0cm9rZS13aWR0aDowLjI2NDU4MyIgLz4KICA8L2c+Cjwvc3ZnPgo=)](https://ts-rest.com/)
[![Turborepo](https://img.shields.io/badge/-Turborepo-FF1E56?style=for-the-badge&logo=turborepo&logoColor=white)](https://turborepo.org/)
[![TypeScript](https://img.shields.io/badge/typescript-3178C6?style=for-the-badge&logo=typescript&logoColor=white)](https://www.typescriptlang.org/)
[![Vite](https://img.shields.io/badge/Vite-9135FF?style=for-the-badge&logo=Vite&logoColor=white)](https://vitejs.dev/)
[![Vitest](https://img.shields.io/badge/vitest-00FF74?style=for-the-badge&logo=vitest&logoColor=white)](https://vitest.dev/)
[![Zod](https://img.shields.io/badge/-Zod-408AFF?style=for-the-badge&logo=zod&logoColor=white)](https://zod.dev/)
# About
Monkeytype is a minimalistic and customizable [typing test](https://www.monkeytype.com). It features many test modes, an account system to save your typing speed history, and user-configurable features such as themes, sounds, a smooth caret, and more. Monkeytype attempts to emulate a natural typing experience during a typing test by unobtrusively presenting the text prompts and displaying typed characters in place, providing straightforward, real-time feedback on typos, speed, and accuracy.
# Features
- minimalistic design, with optional advertisements and focus mode while typing
- type what you see, see what you type
- live errors, wpm, and accuracy displays
- a variety of test lengths and languages
- punctuation and numbers modes
- quotes
- themes
- smooth caret
- account system
- challenges and just-for-fun test modifiers
- and much more
# Discord bot
On the [Monkeytype Discord server](https://www.discord.gg/monkeytype), we added a Discord bot to auto-assign optional roles based on typing performance and challenge completion. You can find its code over at https://github.com/monkeytypegame/monkeytype-bot.
# Bug report or Feature request
If you encounter a bug or have a feature request, [send us an email](mailto:contact@monkeytype.com), [create an issue](https://github.com/monkeytypegame/monkeytype/issues), [create a discussion thread](https://github.com/monkeytypegame/monkeytype/discussions), or [join the Discord server](https://www.discord.gg/monkeytype).
# Want to Contribute?
Refer to [CONTRIBUTING.md](./docs/CONTRIBUTING.md).
# Code of Conduct
Before contributing to this repository, please read the [code of conduct](./docs/CODE_OF_CONDUCT.md).
# Security
To report a security vulnerability, please refer to [SECURITY.md](./docs/SECURITY.md).
# Credits
[Montydrei](https://www.reddit.com/user/montydrei) for the name suggestion.
Everyone who provided valuable feedback on the [original Reddit post](https://www.reddit.com/r/MechanicalKeyboards/comments/gc6wx3/experimenting_with_a_completely_new_type_of/) for the prototype of this website.
All of the [contributors](https://github.com/monkeytypegame/monkeytype/graphs/contributors) have helped implement various features, add themes, fix bugs, and more.
# Support
If you wish to support further development and feel extra awesome, you can [donate](https://ko-fi.com/monkeytype), [become a Patron](https://www.patreon.com/monkeytype) or [buy a t-shirt](https://www.monkeytype.store/).

5
backend/.gitignore vendored Normal file
View File

@@ -0,0 +1,5 @@
lastId.txt
log_success.txt
log_failed.txt
build
worker.*

16
backend/.oxlintrc.json Normal file
View File

@@ -0,0 +1,16 @@
{
"ignorePatterns": ["node_modules", "__migration__", "dist", ".turbo"],
"extends": [
"../packages/oxlint-config/index.jsonc",
"../packages/oxlint-config/plugin.jsonc"
// "@monkeytype/oxlint-config"
],
"overrides": [
{
"files": ["src/**/*.ts"],
"rules": {
"import/no-cycle": "off" //todo: fix cycles and turn this on
}
}
]
}

3
backend/.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,3 @@
{
"oxc.fmt.configPath": "../.oxfmtrc-editor.json"
}

View File

@@ -0,0 +1,258 @@
import "dotenv/config";
import * as DB from "../src/init/db";
import { Collection, Db } from "mongodb";
import readlineSync from "readline-sync";
import { DBUser } from "../src/dal/user";
import { DBResult } from "../src/utils/result";
// Number of users fetched and processed per iteration of the batch loop.
const batchSize = 50;
// Cooperative shutdown flag; cleared by the SIGINT handler below so the
// batch loop in migrateResults() can stop between batches instead of
// being killed mid-write.
let appRunning = true;
let db: Db | undefined;
let userCollection: Collection<DBUser>;
let resultCollection: Collection<DBResult>;
// Matches users that have not been migrated yet (testActivity missing).
const filter = { testActivity: { $exists: false } };
// Ctrl+C requests a graceful stop; the current batch is allowed to finish.
process.on("SIGINT", () => {
  console.log("\nshutting down...");
  appRunning = false;
});
// Run the migration only when executed directly, not when imported
// (e.g. by the test suite, which calls migrate() itself).
if (require.main === module) {
  void main();
}
/**
 * Interactive entry point: announces the target database, asks for
 * confirmation, then connects and runs the migration. The database
 * connection is always closed in the finally block, even on error
 * or when the user declines.
 */
async function main(): Promise<void> {
  try {
    console.log(
      `Connecting to database ${process.env["DB_NAME"]} on ${process.env["DB_URI"]}...`,
    );
    // Declining the prompt flips the flag so the migration is skipped
    // and the final message reports "aborted".
    if (!readlineSync.keyInYN("Ready to start migration?")) {
      appRunning = false;
    }
    if (appRunning) {
      await DB.connect();
      console.log("Connected to database");
      db = DB.getDb();
      if (db === undefined) {
        throw Error("db connection failed");
      }
      await migrate();
    }
    console.log(`\nMigration ${appRunning ? "done" : "aborted"}.`);
  } catch (e) {
    // Fix: log message previously misspelled "occured".
    console.log("error occurred:", { e });
  } finally {
    await DB.close();
  }
}
/**
 * Runs the testActivity migration: resolves the users/results collections,
 * ensures a unique index on users.uid (required by the $merge stage in
 * migrateUsers), then processes users batch by batch.
 */
export async function migrate(): Promise<void> {
  resultCollection = DB.collection("results");
  userCollection = DB.collection("users");
  console.log("Creating index on users collection...");
  const indexStartedAt = Date.now();
  await userCollection.createIndex({ uid: 1 }, { unique: true });
  console.log("Index created in", Date.now() - indexStartedAt, "ms");
  await migrateResults();
}
/**
 * Drives the per-batch migration loop: repeatedly fetches a batch of
 * un-migrated users, aggregates their results into testActivity data,
 * gives result-less users an empty testActivity, and reports progress
 * after every batch. Stops when a batch comes back empty or when a
 * SIGINT has cleared appRunning.
 */
async function migrateResults(): Promise<void> {
  const allUsersCount = await userCollection.countDocuments(filter);
  if (allUsersCount === 0) {
    console.log("No users to migrate.");
    return;
  } else {
    console.log("Users to migrate:", allUsersCount);
  }
  console.log(`Migrating ~${allUsersCount} users using batchSize=${batchSize}`);
  let migratedSoFar = 0;
  const startedAt = new Date().valueOf();
  // Body always runs at least once (matches the original do/while).
  for (;;) {
    const batchStartedAt = Date.now();
    console.log("Fetching users to migrate...");
    const fetchStartedAt = Date.now();
    const batchUids = await getUsersToMigrate(batchSize);
    console.log(
      "Fetched",
      batchUids.length,
      "users in",
      Date.now() - fetchStartedAt,
      "ms",
    );
    console.log("Users to migrate:", batchUids.join(","));
    // Aggregate this batch's results into testActivity documents.
    const migrateStartedAt = Date.now();
    await migrateUsers(batchUids);
    console.log(
      "Migrated",
      batchUids.length,
      "users in",
      Date.now() - migrateStartedAt,
      "ms",
    );
    // Users with zero results get an empty testActivity so they stop
    // matching the migration filter.
    const markStartedAt = Date.now();
    await handleUsersWithNoResults(batchUids);
    console.log("Handled users with no results in", Date.now() - markStartedAt, "ms");
    // Progress tracking.
    migratedSoFar += batchUids.length;
    updateProgress(allUsersCount, migratedSoFar, startedAt, Date.now() - batchStartedAt);
    if (batchUids.length === 0 || !appRunning) break;
  }
  if (appRunning) updateProgress(100, 100, startedAt, 0);
}
/**
 * Fetches up to `limit` uids of users that still match the migration
 * filter (no testActivity field yet). Only the uid field is projected.
 */
async function getUsersToMigrate(limit: number): Promise<string[]> {
  const docs = await userCollection
    .find(filter, { limit })
    .project({ uid: 1, _id: 0 })
    .toArray();
  return docs.map((doc) => doc["uid"]);
}
/**
 * Aggregates each batch user's results into a testActivity document of
 * shape { [year]: (testsPerDay | null)[] } and writes it onto the matching
 * users via $merge. Users with no results produce no output document
 * (whenNotMatched: "discard") and are handled by handleUsersWithNoResults.
 *
 * toArray() is called only to force execution of the pipeline; a $merge
 * stage yields no output documents.
 */
async function migrateUsers(uids: string[]): Promise<void> {
  await resultCollection
    .aggregate(
      [
        // Restrict to results belonging to this batch of users.
        {
          $match: {
            uid: { $in: uids },
          },
        },
        // Keep only uid and timestamp. NOTE(review): in $project any
        // truthy value includes a field, so -1 behaves like 1 here —
        // looks unintentional; confirm before copying this pattern.
        {
          $project: {
            _id: 0,
            timestamp: -1,
            uid: 1,
          },
        },
        // Convert the numeric timestamp into a BSON date.
        {
          $addFields: {
            date: {
              $toDate: "$timestamp",
            },
          },
        },
        // Reduce each result to (uid, year, day-of-year).
        {
          $replaceWith: {
            uid: "$uid",
            year: {
              $year: "$date",
            },
            day: {
              $dayOfYear: "$date",
            },
          },
        },
        // Count results per user per calendar day.
        {
          $group: {
            _id: {
              uid: "$uid",
              year: "$year",
              day: "$day",
            },
            count: {
              $sum: 1,
            },
          },
        },
        // Collect each (uid, year)'s per-day counts into one set.
        {
          $group: {
            _id: {
              uid: "$_id.uid",
              year: "$_id.year",
            },
            days: {
              $addToSet: {
                day: "$_id.day",
                tests: "$count",
              },
            },
          },
        },
        // Expand the day set into { [year]: array } where index day-1
        // holds that day's test count and days without tests stay null.
        {
          $replaceWith: {
            uid: "$_id.uid",
            days: {
              $function: {
                lang: "js",
                args: ["$days", "$_id.year"],
                // NOTE(review): inside this server-side JS, `day` in the
                // for-of loop is undeclared (implicit global in sloppy
                // mode), and the array is sized to (maxDay - 1) so the
                // assignment for the highest day extends it by one.
                // Both appear to work but look accidental — verify.
                body: `function (days, year) {
              var max = Math.max(
                ...days.map((it) => it.day)
              )-1;
              var arr = new Array(max).fill(null);
              for (day of days) {
                arr[day.day-1] = day.tests;
              }
              let result = {};
              result[year] = arr;
              return result;
            }`,
              },
            },
          },
        },
        // Merge each user's per-year objects into a single testActivity map.
        {
          $group: {
            _id: "$uid",
            testActivity: {
              $mergeObjects: "$days",
            },
          },
        },
        // Restore uid as a regular field and drop the synthetic _id.
        {
          $addFields: {
            uid: "$_id",
          },
        },
        {
          $project: {
            _id: 0,
          },
        },
        // Write testActivity onto the matching users documents.
        {
          $merge: {
            into: "users",
            on: "uid",
            whenMatched: "merge",
            whenNotMatched: "discard",
          },
        },
      ],
      { allowDiskUse: true },
    )
    .toArray();
}
/**
 * Give users from this batch that still match the migration filter (i.e.
 * were not touched by the aggregation's $merge because they have no
 * results) an empty testActivity so they count as migrated.
 */
async function handleUsersWithNoResults(uids: string[]): Promise<void> {
  const stillUnmigrated = { $and: [{ uid: { $in: uids } }, filter] };
  await userCollection.updateMany(stillUnmigrated, {
    $set: { testActivity: {} },
  });
}
/**
 * Render a single-line progress report for the migration to stdout.
 *
 * @param all total number of users to migrate
 * @param current number of users processed so far
 * @param start epoch ms at which the migration started
 * @param previousBatchSizeTime duration of the previous batch in ms
 */
function updateProgress(
  all: number,
  current: number,
  start: number,
  previousBatchSizeTime: number,
): void {
  const percentage = all > 0 ? (current / all) * 100 : 0;
  // Guard against division by zero: before any progress is made the
  // unguarded formula yields Infinity/NaN for the estimate.
  const timeLeft =
    percentage > 0
      ? Math.round(
          (((Date.now() - start) / percentage) * (100 - percentage)) / 1000,
        )
      : 0;
  process.stdout.clearLine?.(0);
  process.stdout.cursorTo?.(0);
  process.stdout.write(
    `Previous batch took ${Math.round(previousBatchSizeTime)}ms (~${(
      previousBatchSizeTime / batchSize
    ).toFixed(1)}ms per user) ${Math.round(
      percentage,
    )}% done, estimated time left ${timeLeft} seconds.`,
  );
}

View File

@@ -0,0 +1,75 @@
import { describe, it, expect } from "vitest";
import * as Migration from "../../../__migration__/testActivity";
import * as UserTestData from "../../__testData__/users";
import * as UserDal from "../../../src/dal/user";
import * as ResultDal from "../../../src/dal/result";
import { DBResult } from "../../../src/utils/result";
describe("testActivity migration", () => {
  it("migrates users without results", async () => {
    //given: two users with no results at all
    const firstUser = await UserTestData.createUser();
    const secondUser = await UserTestData.createUser();

    //when
    await Migration.migrate();

    //then: both end up with an empty testActivity
    for (const user of [firstUser, secondUser]) {
      const migrated = await UserDal.getUser(user.uid, "");
      expect(migrated.testActivity).toEqual({});
    }
  });

  it("migrates users with results", async () => {
    //given: one user with results, one without
    const withResults = await UserTestData.createUserWithoutMigration();
    const withoutResults = await UserTestData.createUserWithoutMigration();
    const uid = withResults.uid;

    const jan2nd2023 = 1672621200000;
    const jan1st2024 = 1704070800000;
    const oneHour = 3600000;
    const oneDay = 86400000;
    await createResult(uid, jan2nd2023);
    //three tests on 2024-01-01
    await createResult(uid, jan1st2024);
    await createResult(uid, jan1st2024 + oneHour);
    await createResult(uid, jan1st2024 + oneHour);
    //one test each on 2024-01-02 and 2024-01-03
    await createResult(uid, jan1st2024 + oneDay);
    await createResult(uid, jan1st2024 + 2 * oneDay);

    //when
    await Migration.migrate();

    //then
    const migrated = await UserDal.getUser(withResults.uid, "");
    expect(migrated.testActivity).toEqual({
      "2023": [null, 1],
      "2024": [3, 1, 1],
    });

    const untouched = await UserDal.getUser(withoutResults.uid, "");
    expect(untouched.testActivity).toEqual({});
  });
});
/** Insert a minimal time-60 result for the given user at the given timestamp. */
async function createResult(uid: string, timestamp: number): Promise<void> {
  const minimalResult = {
    wpm: 0,
    rawWpm: 0,
    charStats: [1, 2, 3, 4],
    acc: 0,
    mode: "time",
    mode2: "60",
    timestamp,
    testDuration: 1,
    consistency: 0,
    keyConsistency: 0,
    chartData: "toolong",
    name: "",
  };
  await ResultDal.addResult(uid, minimalResult as unknown as DBResult);
}

View File

@@ -0,0 +1,30 @@
import { describe, it, expect } from "vitest";
import { ObjectId } from "mongodb";
import * as AdminUidsDal from "../../../src/dal/admin-uids";
describe("AdminUidsDal", () => {
describe("isAdmin", () => {
it("should return true for existing admin user", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
await AdminUidsDal.getCollection().insertOne({
_id: new ObjectId(),
uid: uid,
});
//WHEN / THEN
expect(await AdminUidsDal.isAdmin(uid)).toBe(true);
});
it("should return false for non-existing admin user", async () => {
//GIVEN
await AdminUidsDal.getCollection().insertOne({
_id: new ObjectId(),
uid: "admin",
});
//WHEN / THEN
expect(await AdminUidsDal.isAdmin("regularUser")).toBe(false);
});
});
});

View File

@@ -0,0 +1,107 @@
import { describe, it, expect, vi, beforeEach } from "vitest";
import { ObjectId } from "mongodb";
import {
addApeKey,
DBApeKey,
editApeKey,
getApeKey,
updateLastUsedOn,
} from "../../../src/dal/ape-keys";
describe("ApeKeysDal", () => {
  beforeEach(() => {
    vi.useFakeTimers();
  });

  describe("addApeKey", () => {
    it("should be able to add a new ape key", async () => {
      //GIVEN
      const key = buildApeKey();

      //WHEN
      const insertedId = await addApeKey(key);

      //THEN
      expect(insertedId).toBe(key._id.toHexString());
      await expect(getApeKey(insertedId)).resolves.toEqual({ ...key });
    });
  });

  describe("editApeKey", () => {
    it("should edit name of an existing ape key", async () => {
      //GIVEN
      const key = buildApeKey({ useCount: 5, enabled: true });
      const insertedId = await addApeKey(key);

      //WHEN
      const updatedName = "new name";
      await editApeKey(key.uid, insertedId, updatedName, undefined);

      //THEN
      const updated = (await getApeKey(insertedId)) as DBApeKey;
      expect(updated).toEqual({
        ...key,
        name: updatedName,
        modifiedOn: Date.now(),
      });
    });

    it("should edit enabled of an existing ape key", async () => {
      //GIVEN
      const key = buildApeKey({ useCount: 5, enabled: true });
      const insertedId = await addApeKey(key);

      //WHEN
      await editApeKey(key.uid, insertedId, undefined, false);

      //THEN
      const updated = (await getApeKey(insertedId)) as DBApeKey;
      expect(updated).toEqual({
        ...key,
        enabled: false,
        modifiedOn: Date.now(),
      });
    });
  });

  describe("updateLastUsedOn", () => {
    it("should update lastUsedOn and increment useCount when editing with lastUsedOn", async () => {
      //GIVEN
      const key = buildApeKey({
        useCount: 5,
        lastUsedOn: 42,
      });
      const insertedId = await addApeKey(key);

      //WHEN the key is used twice
      await updateLastUsedOn(key.uid, insertedId);
      await updateLastUsedOn(key.uid, insertedId);

      //THEN
      const updated = (await getApeKey(insertedId)) as DBApeKey;
      expect(updated).toEqual({
        ...key,
        modifiedOn: updated.modifiedOn,
        lastUsedOn: Date.now(),
        useCount: 5 + 2,
      });
    });
  });
});
/** Build a DBApeKey populated with defaults; any field can be overridden. */
function buildApeKey(overrides: Partial<DBApeKey> = {}): DBApeKey {
  const defaults: DBApeKey = {
    _id: new ObjectId(),
    uid: "123",
    name: "test",
    hash: "12345",
    createdOn: Date.now(),
    modifiedOn: Date.now(),
    lastUsedOn: Date.now(),
    useCount: 0,
    enabled: true,
  };
  return { ...defaults, ...overrides };
}

View File

@@ -0,0 +1,363 @@
import {
describe,
it,
expect,
beforeAll,
beforeEach,
afterEach,
vi,
} from "vitest";
import { ObjectId } from "mongodb";
import * as BlacklistDal from "../../../src/dal/blocklist";
// Integration tests for the blocklist DAL. Entries are stored as
// case-insensitive hashes of name / email / discordId (sha-256, per the
// expected digest in the "hash" tests) rather than the plain values.
describe("BlocklistDal", () => {
  beforeAll(async () => {
    await BlacklistDal.createIndicies();
  });
  // add: inserts hashes for name/email (and optional discordId) and must
  // not create duplicate hash entries when values are re-added
  describe("add", () => {
    beforeEach(() => {
      vi.useFakeTimers();
    });
    afterEach(() => {
      vi.useRealTimers();
    });
    it("adds user", async () => {
      //GIVEN
      const now = 1715082588;
      vi.setSystemTime(now);
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      //WHEN
      await BlacklistDal.add({ name, email });
      //THEN
      await expect(
        BlacklistDal.getCollection().findOne({
          emailHash: BlacklistDal.hash(email),
        }),
      ).resolves.toMatchObject({
        emailHash: BlacklistDal.hash(email),
        timestamp: now,
      });
      await expect(
        BlacklistDal.getCollection().findOne({
          usernameHash: BlacklistDal.hash(name),
        }),
      ).resolves.toMatchObject({
        usernameHash: BlacklistDal.hash(name),
        timestamp: now,
      });
    });
    it("adds user with discordId", async () => {
      //GIVEN
      const now = 1715082588;
      vi.setSystemTime(now);
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      const discordId = `${name}DiscordId`;
      //WHEN
      await BlacklistDal.add({ name, email, discordId });
      //THEN
      await expect(
        BlacklistDal.getCollection().findOne({
          discordIdHash: BlacklistDal.hash(discordId),
        }),
      ).resolves.toMatchObject({
        discordIdHash: BlacklistDal.hash(discordId),
        timestamp: now,
      });
    });
    it("adds user should not create duplicate name", async () => {
      //GIVEN
      const now = 1715082588;
      vi.setSystemTime(now);
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      const email2 = `${name}@otherdomain.com`;
      await BlacklistDal.add({ name, email });
      //WHEN the same name is added again with a different email
      await BlacklistDal.add({ name, email: email2 });
      //THEN the name hash exists once, both email hashes exist once
      await expect(
        BlacklistDal.getCollection()
          .find({
            usernameHash: BlacklistDal.hash(name),
          })
          .toArray(),
      ).resolves.toHaveLength(1);
      await expect(
        BlacklistDal.getCollection()
          .find({
            emailHash: BlacklistDal.hash(email),
          })
          .toArray(),
      ).resolves.toHaveLength(1);
      await expect(
        BlacklistDal.getCollection()
          .find({
            emailHash: BlacklistDal.hash(email2),
          })
          .toArray(),
      ).resolves.toHaveLength(1);
    });
    it("adds user should not create duplicate email", async () => {
      //GIVEN
      const now = 1715082588;
      vi.setSystemTime(now);
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      const name2 = "user" + new ObjectId().toHexString();
      await BlacklistDal.add({ name, email });
      //WHEN the same email is added again under a different name
      await BlacklistDal.add({ name: name2, email });
      //THEN
      await expect(
        BlacklistDal.getCollection()
          .find({
            emailHash: BlacklistDal.hash(email),
          })
          .toArray(),
      ).resolves.toHaveLength(1);
    });
    it("adds user should not create duplicate discordId", async () => {
      //GIVEN
      const now = 1715082588;
      vi.setSystemTime(now);
      const name = "user" + new ObjectId().toHexString();
      const name2 = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      const discordId = `${name}DiscordId`;
      await BlacklistDal.add({ name, email, discordId });
      //WHEN the same discordId is added again under a different name
      await BlacklistDal.add({ name: name2, email, discordId });
      //THEN
      await expect(
        BlacklistDal.getCollection()
          .find({
            discordIdHash: BlacklistDal.hash(discordId),
          })
          .toArray(),
      ).resolves.toHaveLength(1);
    });
  });
  // contains: matches if ANY of the provided fields hashes to a stored
  // entry; matching is case-insensitive
  describe("contains", () => {
    it("contains user", async () => {
      //GIVEN
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      const discordId = `${name}DiscordId`;
      await BlacklistDal.add({ name, email, discordId });
      await BlacklistDal.add({ name: "test", email: "test@example.com" });
      //WHEN / THEN
      //by name
      await expect(BlacklistDal.contains({ name })).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ name: name.toUpperCase() }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ name, email: "unknown", discordId: "unknown" }),
      ).resolves.toBeTruthy();
      //by email
      await expect(BlacklistDal.contains({ email })).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ email: email.toUpperCase() }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ name: "unknown", email, discordId: "unknown" }),
      ).resolves.toBeTruthy();
      //by discordId
      await expect(BlacklistDal.contains({ discordId })).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ discordId: discordId.toUpperCase() }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ name: "unknown", email: "unknown", discordId }),
      ).resolves.toBeTruthy();
      //by name and email and discordId
      await expect(
        BlacklistDal.contains({ name, email, discordId }),
      ).resolves.toBeTruthy();
    });
    it("does not contain user", async () => {
      //GIVEN
      await BlacklistDal.add({ name: "test", email: "test@example.com" });
      await BlacklistDal.add({ name: "test2", email: "test2@example.com" });
      //WHEN / THEN
      await expect(
        BlacklistDal.contains({ name: "unknown" }),
      ).resolves.toBeFalsy();
      await expect(
        BlacklistDal.contains({ email: "unknown" }),
      ).resolves.toBeFalsy();
      await expect(
        BlacklistDal.contains({ discordId: "unknown" }),
      ).resolves.toBeFalsy();
      await expect(
        BlacklistDal.contains({
          name: "unknown",
          email: "unknown",
          discordId: "unknown",
        }),
      ).resolves.toBeFalsy();
      //an empty query matches nothing
      await expect(BlacklistDal.contains({})).resolves.toBeFalsy();
    });
  });
  // remove: deletes only the hashes of the fields provided; other hashes
  // of the same user (and unrelated decoy entries) must survive
  describe("remove", () => {
    it("removes existing username", async () => {
      //GIVEN
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      await BlacklistDal.add({ name, email });
      await BlacklistDal.add({ name: "test", email: "test@example.com" });
      //WHEN
      await BlacklistDal.remove({ name });
      //THEN
      await expect(BlacklistDal.contains({ name })).resolves.toBeFalsy();
      await expect(BlacklistDal.contains({ email })).resolves.toBeTruthy();
      //decoy still exists
      await expect(
        BlacklistDal.contains({ name: "test" }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ email: "test@example.com" }),
      ).resolves.toBeTruthy();
    });
    it("removes existing email", async () => {
      //GIVEN
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      await BlacklistDal.add({ name, email });
      await BlacklistDal.add({ name: "test", email: "test@example.com" });
      //WHEN
      await BlacklistDal.remove({ email });
      //THEN
      await expect(BlacklistDal.contains({ email })).resolves.toBeFalsy();
      await expect(BlacklistDal.contains({ name })).resolves.toBeTruthy();
      //decoy still exists
      await expect(
        BlacklistDal.contains({ name: "test" }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ email: "test@example.com" }),
      ).resolves.toBeTruthy();
    });
    it("removes existing discordId", async () => {
      //GIVEN
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      const discordId = `${name}DiscordId`;
      await BlacklistDal.add({ name, email, discordId });
      await BlacklistDal.add({
        name: "test",
        email: "test@example.com",
        discordId: "testDiscordId",
      });
      //WHEN
      await BlacklistDal.remove({ discordId });
      //THEN
      await expect(BlacklistDal.contains({ discordId })).resolves.toBeFalsy();
      await expect(BlacklistDal.contains({ name })).resolves.toBeTruthy();
      await expect(BlacklistDal.contains({ email })).resolves.toBeTruthy();
      //decoy still exists
      await expect(
        BlacklistDal.contains({ name: "test" }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ email: "test@example.com" }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ discordId: "testDiscordId" }),
      ).resolves.toBeTruthy();
    });
    it("removes existing username,email and discordId", async () => {
      //GIVEN
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      const discordId = `${name}DiscordId`;
      await BlacklistDal.add({ name, email, discordId });
      await BlacklistDal.add({
        name: "test",
        email: "test@example.com",
        discordId: "testDiscordId",
      });
      //WHEN
      await BlacklistDal.remove({ name, email, discordId });
      //THEN
      await expect(BlacklistDal.contains({ email })).resolves.toBeFalsy();
      await expect(BlacklistDal.contains({ name })).resolves.toBeFalsy();
      await expect(BlacklistDal.contains({ discordId })).resolves.toBeFalsy();
      //decoy still exists
      await expect(
        BlacklistDal.contains({ name: "test" }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ email: "test@example.com" }),
      ).resolves.toBeTruthy();
      await expect(
        BlacklistDal.contains({ discordId: "testDiscordId" }),
      ).resolves.toBeTruthy();
    });
    it("does not remove for empty user", async () => {
      //GIVEN
      const name = "user" + new ObjectId().toHexString();
      const email = `${name}@example.com`;
      const discordId = `${name}DiscordId`;
      await BlacklistDal.add({ name, email, discordId });
      await BlacklistDal.add({ name: "test", email: "test@example.com" });
      //WHEN removing with no fields at all
      await BlacklistDal.remove({});
      //THEN nothing was deleted
      await expect(BlacklistDal.contains({ email })).resolves.toBeTruthy();
      await expect(BlacklistDal.contains({ name })).resolves.toBeTruthy();
      await expect(BlacklistDal.contains({ discordId })).resolves.toBeTruthy();
    });
  });
  describe("hash", () => {
    it("hashes case insensitive", () => {
      // 9f86d0... is the well-known sha-256 digest of "test"
      ["test", "TEST", "tESt"].forEach((value) =>
        expect(BlacklistDal.hash(value)).toEqual(
          "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08",
        ),
      );
    });
  });
});

View File

@@ -0,0 +1,42 @@
import { ObjectId } from "mongodb";
import { describe, expect, it } from "vitest";
import * as ConfigDal from "../../../src/dal/config";
const getConfigCollection = ConfigDal.__testing.getConfigCollection;
describe("ConfigDal", () => {
  describe("saveConfig", () => {
    it("should save and update user configuration correctly", async () => {
      //GIVEN a stored config containing a legacy value
      const uid = new ObjectId().toString();
      await getConfigCollection().insertOne({
        uid,
        config: {
          ads: "on",
          time: 60,
          quickTab: true, //legacy value
        },
      } as any);

      //WHEN saving twice
      await ConfigDal.saveConfig(uid, {
        ads: "on",
        difficulty: "normal",
      } as any);
      await ConfigDal.saveConfig(uid, { ads: "off" });

      //THEN the last write wins and untouched keys survive
      const stored = (await ConfigDal.getConfig(uid)) as ConfigDal.DBConfig;
      expect(stored.config.ads).toBe("off");
      expect(stored.config.time).toBe(60);
      //legacy values are removed
      expect((stored.config as any)["quickTab"]).toBeUndefined();
    });
  });
});

View File

@@ -0,0 +1,493 @@
import {
describe,
it,
expect,
vi,
beforeAll,
beforeEach,
afterEach,
} from "vitest";
import { ObjectId } from "mongodb";
import * as ConnectionsDal from "../../../src/dal/connections";
import { createConnection } from "../../__testData__/connections";
import { createUser } from "../../__testData__/users";
describe("ConnectionsDal", () => {
beforeAll(async () => {
await ConnectionsDal.createIndicies();
});
describe("getRequests", () => {
it("get by uid", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const initOne = await createConnection({ initiatorUid: uid });
const initTwo = await createConnection({ initiatorUid: uid });
const friendOne = await createConnection({ receiverUid: uid });
const _decoy = await createConnection({});
//WHEN / THEM
expect(
await ConnectionsDal.getConnections({
initiatorUid: uid,
receiverUid: uid,
}),
).toStrictEqual([initOne, initTwo, friendOne]);
});
it("get by uid and status", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const initAccepted = await createConnection({
initiatorUid: uid,
status: "accepted",
});
const _initPending = await createConnection({
initiatorUid: uid,
status: "pending",
});
const initBlocked = await createConnection({
initiatorUid: uid,
status: "blocked",
});
const friendAccepted = await createConnection({
receiverUid: uid,
status: "accepted",
});
const _friendPending = await createConnection({
receiverUid: uid,
status: "pending",
});
const _decoy = await createConnection({ status: "accepted" });
//WHEN / THEN
expect(
await ConnectionsDal.getConnections({
initiatorUid: uid,
receiverUid: uid,
status: ["accepted", "blocked"],
}),
).toStrictEqual([initAccepted, initBlocked, friendAccepted]);
});
});
describe("create", () => {
const now = 1715082588;
beforeEach(() => {
vi.useFakeTimers();
vi.setSystemTime(now);
});
afterEach(() => {
vi.useRealTimers();
});
it("should fail creating duplicates", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = await createConnection({
initiatorUid: uid,
});
//WHEN/THEN
await expect(
createConnection({
initiatorUid: first.receiverUid,
receiverUid: uid,
}),
).rejects.toThrow("Connection request already sent");
});
it("should create", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const receiverUid = new ObjectId().toHexString();
//WHEN
const created = await ConnectionsDal.create(
{ uid, name: "Bob" },
{ uid: receiverUid, name: "Kevin" },
2,
);
//THEN
expect(created).toEqual({
_id: created._id,
initiatorUid: uid,
initiatorName: "Bob",
receiverUid: receiverUid,
receiverName: "Kevin",
lastModified: now,
status: "pending",
key: `${uid}/${receiverUid}`,
});
});
it("should fail if maximum connections are reached", async () => {
//GIVEN
const initiatorUid = new ObjectId().toHexString();
await createConnection({ initiatorUid });
await createConnection({ initiatorUid });
//WHEN / THEM
await expect(createConnection({ initiatorUid }, 2)).rejects.toThrow(
"Maximum number of connections reached\nStack: create connection request",
);
});
it("should fail creating if blocked", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = await createConnection({
initiatorUid: uid,
status: "blocked",
});
//WHEN/THEN
await expect(
createConnection({
initiatorUid: first.receiverUid,
receiverUid: uid,
}),
).rejects.toThrow("Connection blocked");
});
});
describe("updateStatus", () => {
const now = 1715082588;
beforeEach(() => {
vi.useFakeTimers();
vi.setSystemTime(now);
});
afterEach(() => {
vi.useRealTimers();
});
it("should update the status", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = await createConnection({
receiverUid: uid,
lastModified: 100,
});
const second = await createConnection({
receiverUid: uid,
lastModified: 200,
});
//WHEN
await ConnectionsDal.updateStatus(
uid,
first._id.toHexString(),
"accepted",
);
//THEN
expect(await ConnectionsDal.getConnections({ receiverUid: uid })).toEqual(
[{ ...first, status: "accepted", lastModified: now }, second],
);
//can update twice to the same status
await ConnectionsDal.updateStatus(
uid,
first._id.toHexString(),
"accepted",
);
});
it("should fail if uid does not match the reeceiverUid", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = await createConnection({
initiatorUid: uid,
});
//WHEN / THEN
await expect(
ConnectionsDal.updateStatus(uid, first._id.toHexString(), "accepted"),
).rejects.toThrow("No permission or connection not found");
});
});
describe("deleteById", () => {
it("should delete by initiator", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = await createConnection({
initiatorUid: uid,
});
const second = await createConnection({
initiatorUid: uid,
});
//WHEN
await ConnectionsDal.deleteById(uid, first._id.toHexString());
//THEN
expect(
await ConnectionsDal.getConnections({ initiatorUid: uid }),
).toStrictEqual([second]);
});
it("should delete by receiver", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = await createConnection({
receiverUid: uid,
});
const second = await createConnection({
receiverUid: uid,
status: "accepted",
});
//WHEN
await ConnectionsDal.deleteById(uid, first._id.toHexString());
//THEN
expect(
await ConnectionsDal.getConnections({
initiatorUid: second.initiatorUid,
}),
).toStrictEqual([second]);
});
it("should fail if uid does not match", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = await createConnection({
initiatorUid: uid,
});
//WHEN / THEN
await expect(
ConnectionsDal.deleteById("Bob", first._id.toHexString()),
).rejects.toThrow("No permission or connection not found");
});
it("should fail if initiator deletes blocked by receiver", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const myRequestWasBlocked = await createConnection({
initiatorName: uid,
status: "blocked",
});
//WHEN / THEN
await expect(
ConnectionsDal.deleteById(uid, myRequestWasBlocked._id.toHexString()),
).rejects.toThrow("No permission or connection not found");
});
it("allow receiver to delete blocked", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const myBlockedUser = await createConnection({
receiverUid: uid,
status: "blocked",
});
//WHEN
await ConnectionsDal.deleteById(uid, myBlockedUser._id.toHexString());
//THEN
expect(await ConnectionsDal.getConnections({ receiverUid: uid })).toEqual(
[],
);
});
});
describe("deleteByUid", () => {
it("should delete by uid", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const _initOne = await createConnection({ initiatorUid: uid });
const _initTwo = await createConnection({ initiatorUid: uid });
const _friendOne = await createConnection({ receiverUid: uid });
const decoy = await createConnection({});
//WHEN
await ConnectionsDal.deleteByUid(uid);
//THEN
expect(
await ConnectionsDal.getConnections({
initiatorUid: uid,
receiverUid: uid,
}),
).toEqual([]);
expect(
await ConnectionsDal.getConnections({
initiatorUid: decoy.initiatorUid,
}),
).toEqual([decoy]);
});
});
describe("updateName", () => {
it("should update the name", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const initOne = await createConnection({
initiatorUid: uid,
initiatorName: "Bob",
});
const initTwo = await createConnection({
initiatorUid: uid,
initiatorName: "Bob",
});
const friendOne = await createConnection({
receiverUid: uid,
receiverName: "Bob",
});
const decoy = await createConnection({});
//WHEN
await ConnectionsDal.updateName(uid, "King Bob");
//THEN
expect(
await ConnectionsDal.getConnections({
initiatorUid: uid,
receiverUid: uid,
}),
).toEqual([
{ ...initOne, initiatorName: "King Bob" },
{ ...initTwo, initiatorName: "King Bob" },
{ ...friendOne, receiverName: "King Bob" },
]);
expect(
await ConnectionsDal.getConnections({
initiatorUid: decoy.initiatorUid,
}),
).toEqual([decoy]);
});
});
describe("getFriendsUids", () => {
it("should return friend uids", async () => {
//GIVE
const uid = new ObjectId().toHexString();
const friendOne = await createConnection({
initiatorUid: uid,
status: "accepted",
});
const friendTwo = await createConnection({
receiverUid: uid,
status: "accepted",
});
const friendThree = await createConnection({
receiverUid: uid,
status: "accepted",
});
const _pending = await createConnection({
initiatorUid: uid,
status: "pending",
});
const _blocked = await createConnection({
initiatorUid: uid,
status: "blocked",
});
const _decoy = await createConnection({});
//WHEN
const friendUids = await ConnectionsDal.getFriendsUids(uid);
//THEN
expect(friendUids).toEqual([
uid,
friendOne.receiverUid,
friendTwo.initiatorUid,
friendThree.initiatorUid,
]);
});
});
describe("aggregateWithAcceptedConnections", () => {
it("should return friend uids", async () => {
//GIVE
const uid = (await createUser()).uid;
const friendOne = await createConnection({
initiatorUid: uid,
receiverUid: (await createUser()).uid,
status: "accepted",
});
const friendTwo = await createConnection({
initiatorUid: (await createUser()).uid,
receiverUid: uid,
status: "accepted",
});
const friendThree = await createConnection({
initiatorUid: (await createUser()).uid,
receiverUid: uid,
status: "accepted",
});
const _pending = await createConnection({
initiatorUid: uid,
receiverUid: (await createUser()).uid,
status: "pending",
});
const _blocked = await createConnection({
initiatorUid: uid,
receiverUid: (await createUser()).uid,
status: "blocked",
});
const _decoy = await createConnection({
receiverUid: (await createUser()).uid,
status: "accepted",
});
//WHEN
const friendUids = await ConnectionsDal.aggregateWithAcceptedConnections<{
uid: string;
}>({ collectionName: "users", uid }, [{ $project: { uid: true } }]);
//THEN
expect(friendUids.flatMap((it) => it.uid).toSorted()).toEqual([
uid,
friendOne.receiverUid,
friendTwo.initiatorUid,
friendThree.initiatorUid,
]);
});
it("should return friend uids and metaData", async () => {
//GIVE
const me = await createUser();
const friend = await createUser();
const connection = await createConnection({
initiatorUid: me.uid,
receiverUid: friend.uid,
status: "accepted",
});
//WHEN
const friendUids = await ConnectionsDal.aggregateWithAcceptedConnections(
{ collectionName: "users", uid: me.uid, includeMetaData: true },
[
{
$project: {
uid: true,
lastModified: "$connectionMeta.lastModified",
connectionId: "$connectionMeta._id",
},
},
],
);
//THEN
expect(friendUids).toEqual([
{
_id: friend._id,
connectionId: connection._id,
lastModified: connection.lastModified,
uid: friend.uid,
},
{
_id: me._id,
uid: me.uid,
},
]);
});
});
});

View File

@@ -0,0 +1,544 @@
import { describe, it, expect, afterEach, vi } from "vitest";
import { ObjectId } from "mongodb";
import * as UserDal from "../../../src/dal/user";
import * as LeaderboardsDal from "../../../src/dal/leaderboards";
import * as PublicDal from "../../../src/dal/public";
import type { DBLeaderboardEntry } from "../../../src/dal/leaderboards";
import type { PersonalBest } from "@monkeytype/schemas/shared";
import * as DB from "../../../src/init/db";
import { LbPersonalBests } from "../../../src/utils/pb";
import { pb } from "../../__testData__/users";
import { createConnection } from "../../__testData__/connections";
import { omit } from "../../../src/utils/misc";
describe("LeaderboardsDal", () => {
afterEach(async () => {
await DB.collection("users").deleteMany({});
});
  // update: rebuilds a (mode, mode2, language) leaderboard from the users
  // collection and refreshes the public speed histogram
  describe("update", () => {
    it("should ignore unapplicable users on leaderboard", async () => {
      //GIVEN one applicable user plus users excluded for each reason:
      //banned, opted out, pending name change, zero timeTyping, or a pb
      //with a zero wpm/acc/timestamp, or no pb for this mode2 at all
      const lbPersonalBests = lbBests(pb(100), pb(90));
      const applicableUser = await createUser(lbPersonalBests);
      await createUser(lbPersonalBests, { banned: true });
      await createUser(lbPersonalBests, { lbOptOut: true });
      await createUser(lbPersonalBests, { needsToChangeName: true });
      await createUser(lbPersonalBests, { timeTyping: 0 });
      await createUser(lbBests(pb(0, 90, 1)));
      await createUser(lbBests(pb(60, 0, 1)));
      await createUser(lbBests(pb(60, 90, 0)));
      await createUser(lbBests(undefined, pb(60)));
      //WHEN
      await LeaderboardsDal.update("time", "15", "english");
      const results = await LeaderboardsDal.get("time", "15", "english", 0, 50);
      //THEN
      expect(results).toHaveLength(1);
      expect(
        (results as LeaderboardsDal.DBLeaderboardEntry[])[0],
      ).toHaveProperty("uid", applicableUser.uid);
    });
    it("should create leaderboard time english 15", async () => {
      //GIVEN users ordered by wpm, then acc, then timestamp
      const rank1 = await createUser(lbBests(pb(100, 90, 2)));
      const rank2 = await createUser(lbBests(pb(100, 90, 1)));
      const rank3 = await createUser(lbBests(pb(100, 80, 2)));
      const rank4 = await createUser(lbBests(pb(90, 100, 1)));
      //WHEN
      await LeaderboardsDal.update("time", "15", "english");
      const results = (await LeaderboardsDal.get(
        "time",
        "15",
        "english",
        0,
        50,
      )) as DBLeaderboardEntry[];
      //THEN
      const lb = results.map((it) => omit(it, ["_id"]));
      expect(lb).toEqual([
        expectedLbEntry("15", { rank: 1, user: rank1 }),
        expectedLbEntry("15", { rank: 2, user: rank2 }),
        expectedLbEntry("15", { rank: 3, user: rank3 }),
        expectedLbEntry("15", { rank: 4, user: rank4 }),
      ]);
    });
    it("should create leaderboard time english 60", async () => {
      //GIVEN the 60s pb (second argument of lbBests) determines the rank
      const rank1 = await createUser(lbBests(pb(90), pb(100, 90, 2)));
      const rank2 = await createUser(lbBests(undefined, pb(100, 90, 1)));
      const rank3 = await createUser(lbBests(undefined, pb(100, 80, 2)));
      const rank4 = await createUser(lbBests(undefined, pb(90, 100, 1)));
      //WHEN
      await LeaderboardsDal.update("time", "60", "english");
      const results = (await LeaderboardsDal.get(
        "time",
        "60",
        "english",
        0,
        50,
      )) as LeaderboardsDal.DBLeaderboardEntry[];
      //THEN
      const lb = results.map((it) => omit(it, ["_id"]));
      expect(lb).toEqual([
        expectedLbEntry("60", { rank: 1, user: rank1 }),
        expectedLbEntry("60", { rank: 2, user: rank2 }),
        expectedLbEntry("60", { rank: 3, user: rank3 }),
        expectedLbEntry("60", { rank: 4, user: rank4 }),
      ]);
    });
    it("should not include discord properties for users without discord connection", async () => {
      //GIVEN
      await createUser(lbBests(pb(90), pb(100, 90, 2)), {
        discordId: undefined,
        discordAvatar: undefined,
      });
      //WHEN
      await LeaderboardsDal.update("time", "60", "english");
      const lb = (await LeaderboardsDal.get(
        "time",
        "60",
        "english",
        0,
        50,
      )) as DBLeaderboardEntry[];
      //THEN
      expect(lb[0]).not.toHaveProperty("discordId");
      expect(lb[0]).not.toHaveProperty("discordAvatar");
    });
    it("should remove consistency from results if null", async () => {
      //GIVEN a pb whose consistency is forced to undefined
      const stats = pb(100, 90, 2);
      //@ts-ignore
      stats.consistency = undefined;
      await createUser(lbBests(stats));
      //WHEN
      await LeaderboardsDal.update("time", "15", "english");
      const lb = (await LeaderboardsDal.get(
        "time",
        "15",
        "english",
        0,
        50,
      )) as DBLeaderboardEntry[];
      //THEN
      expect(lb[0]).not.toHaveProperty("consistency");
    });
    it("should update public speedHistogram for time english 15", async () => {
      //GIVEN wpm values falling into the 10/20/30 buckets
      await createUser(lbBests(pb(10), pb(60)));
      await createUser(lbBests(pb(24)));
      await createUser(lbBests(pb(28)));
      await createUser(lbBests(pb(31)));
      //WHEN
      await LeaderboardsDal.update("time", "15", "english");
      const result = await PublicDal.getSpeedHistogram("english", "time", "15");
      //THEN
      expect(result).toEqual({ "10": 1, "20": 2, "30": 1 });
    });
    it("should update public speedHistogram for time english 60", async () => {
      //GIVEN
      await createUser(lbBests(pb(60), pb(20)));
      await createUser(lbBests(undefined, pb(21)));
      await createUser(lbBests(undefined, pb(110)));
      await createUser(lbBests(undefined, pb(115)));
      //WHEN
      await LeaderboardsDal.update("time", "60", "english");
      const result = await PublicDal.getSpeedHistogram("english", "time", "60");
      //THEN
      expect(result).toEqual({ "20": 2, "110": 2 });
    });
    it("should create leaderboard with badges", async () => {
      //GIVEN only a *selected* badge makes it onto the leaderboard; with
      //several selected badges the first one wins
      const noBadge = await createUser(lbBests(pb(4)));
      const oneBadgeSelected = await createUser(lbBests(pb(3)), {
        inventory: { badges: [{ id: 1, selected: true }] },
      });
      const oneBadgeNotSelected = await createUser(lbBests(pb(2)), {
        inventory: { badges: [{ id: 1, selected: false }] },
      });
      const multipleBadges = await createUser(lbBests(pb(1)), {
        inventory: {
          badges: [
            { id: 1, selected: false },
            { id: 2, selected: true },
            { id: 3, selected: true },
          ],
        },
      });
      //WHEN
      await LeaderboardsDal.update("time", "15", "english");
      const result = (await LeaderboardsDal.get(
        "time",
        "15",
        "english",
        0,
        50,
      )) as DBLeaderboardEntry[];
      //THEN
      const lb = result.map((it) => omit(it, ["_id"]));
      expect(lb).toEqual([
        expectedLbEntry("15", { rank: 1, user: noBadge }),
        expectedLbEntry("15", {
          rank: 2,
          user: oneBadgeSelected,
          badgeId: 1,
        }),
        expectedLbEntry("15", { rank: 3, user: oneBadgeNotSelected }),
        expectedLbEntry("15", {
          rank: 4,
          user: multipleBadges,
          badgeId: 2,
        }),
      ]);
    });
    it("should create leaderboard with premium", async () => {
      //GIVEN
      vi.useRealTimers(); //timestamp for premium is calculated in mongo
      const noPremium = await createUser(lbBests(pb(4)));
      const lifetime = await createUser(lbBests(pb(3)), premium(-1));
      const validPremium = await createUser(lbBests(pb(2)), premium(1000));
      const expiredPremium = await createUser(lbBests(pb(1)), premium(-10));
      //WHEN
      await LeaderboardsDal.update("time", "15", "english");
      const result = (await LeaderboardsDal.get(
        "time",
        "15",
        "english",
        0,
        50,
        true,
      )) as DBLeaderboardEntry[];
      //THEN lifetime (-1) and unexpired premium are flagged, expired is not
      const lb = result.map((it) => omit(it, ["_id"]));
      expect(lb).toEqual([
        expectedLbEntry("15", { rank: 1, user: noPremium }),
        expectedLbEntry("15", {
          rank: 2,
          user: lifetime,
          isPremium: true,
        }),
        expectedLbEntry("15", {
          rank: 3,
          user: validPremium,
          isPremium: true,
        }),
        expectedLbEntry("15", { rank: 4, user: expiredPremium }),
      ]);
    });
    it("should create leaderboard without premium if feature disabled", async () => {
      //GIVEN
      // NOTE(review): the GIVEN is commented out, so no users are created
      // and results is empty — the assertion below passes trivially.
      // Consider seeding a premium user to actually exercise the flag.
      // const lifetime = await createUser(lbBests(pb(3)), premium(-1));
      //WHEN
      await LeaderboardsDal.update("time", "15", "english");
      const results = (await LeaderboardsDal.get(
        "time",
        "15",
        "english",
        0,
        50,
        false,
      )) as DBLeaderboardEntry[];
      //THEN
      expect(results[0]?.isPremium).toBeUndefined();
    });
  });
describe("get", () => {
it("should get for page", async () => {
//GIVEN
const _rank1 = await createUser(lbBests(pb(90), pb(105, 90, 2)));
const _rank2 = await createUser(lbBests(undefined, pb(100, 90, 1)));
const rank3 = await createUser(lbBests(undefined, pb(95, 80, 2)));
const rank4 = await createUser(lbBests(undefined, pb(90, 100, 1)));
await LeaderboardsDal.update("time", "60", "english");
//WHEN
const results = (await LeaderboardsDal.get(
"time",
"60",
"english",
1,
2,
true,
)) as LeaderboardsDal.DBLeaderboardEntry[];
//THEN
const lb = results.map((it) => omit(it, ["_id"]));
expect(lb).toEqual([
expectedLbEntry("60", { rank: 3, user: rank3 }),
expectedLbEntry("60", { rank: 4, user: rank4 }),
]);
});
it("should get for friends only", async () => {
//GIVEN
const rank1 = await createUser(lbBests(pb(90), pb(100, 90, 2)));
const uid = rank1.uid;
const _rank2 = await createUser(lbBests(undefined, pb(100, 90, 1)));
const _rank3 = await createUser(lbBests(undefined, pb(100, 80, 2)));
const rank4 = await createUser(lbBests(undefined, pb(90, 100, 1)));
//two friends, one is not on the leaderboard
await createConnection({
initiatorUid: uid,
receiverUid: rank4.uid,
status: "accepted",
});
await createConnection({ initiatorUid: uid, status: "accepted" });
await LeaderboardsDal.update("time", "60", "english");
//WHEN
const results = (await LeaderboardsDal.get(
"time",
"60",
"english",
0,
50,
false,
uid,
)) as LeaderboardsDal.DBLeaderboardEntry[];
//THEN
const lb = results.map((it) => omit(it, ["_id"]));
expect(lb).toEqual([
expectedLbEntry("60", { rank: 1, user: rank1, friendsRank: 1 }),
expectedLbEntry("60", { rank: 4, user: rank4, friendsRank: 2 }),
]);
});
it("should get for friends only with page", async () => {
//GIVEN
const rank1 = await createUser(lbBests(pb(90), pb(105, 90, 2)));
const uid = rank1.uid;
const rank2 = await createUser(lbBests(undefined, pb(100, 90, 1)));
const _rank3 = await createUser(lbBests(undefined, pb(95, 80, 2)));
const rank4 = await createUser(lbBests(undefined, pb(90, 100, 1)));
await LeaderboardsDal.update("time", "60", "english");
await createConnection({
initiatorUid: uid,
receiverUid: rank2.uid,
status: "accepted",
});
await createConnection({
initiatorUid: rank4.uid,
receiverUid: uid,
status: "accepted",
});
//WHEN
const results = (await LeaderboardsDal.get(
"time",
"60",
"english",
1,
2,
false,
uid,
)) as LeaderboardsDal.DBLeaderboardEntry[];
//THEN
const lb = results.map((it) => omit(it, ["_id"]));
expect(lb).toEqual([
expectedLbEntry("60", { rank: 4, user: rank4, friendsRank: 3 }),
]);
});
it("should return empty list if no friends", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
//WHEN
const results = (await LeaderboardsDal.get(
"time",
"60",
"english",
1,
2,
false,
uid,
)) as LeaderboardsDal.DBLeaderboardEntry[];
//THEN
expect(results).toEqual([]);
});
});
describe("getCount / getRank", () => {
it("should get count", async () => {
//GIVEN
await createUser(lbBests(undefined, pb(105)), { name: "One" });
await createUser(lbBests(undefined, pb(100)), { name: "Two" });
const me = await createUser(lbBests(undefined, pb(95)), { name: "Me" });
await createUser(lbBests(undefined, pb(90)), { name: "Three" });
await LeaderboardsDal.update("time", "60", "english");
//WHEN / THEN
expect(await LeaderboardsDal.getCount("time", "60", "english")) //
.toEqual(4);
expect(await LeaderboardsDal.getRank("time", "60", "english", me.uid)) //
.toEqual(
expect.objectContaining({
wpm: 95,
rank: 3,
name: me.name,
uid: me.uid,
}),
);
});
it("should get for friends only", async () => {
//GIVEN
const friendOne = await createUser(lbBests(undefined, pb(105)));
await createUser(lbBests(undefined, pb(100)));
await createUser(lbBests(undefined, pb(95)));
const friendTwo = await createUser(lbBests(undefined, pb(90)));
const me = await createUser(lbBests(undefined, pb(99)));
await LeaderboardsDal.update("time", "60", "english");
await createConnection({
initiatorUid: me.uid,
receiverUid: friendOne.uid,
status: "accepted",
});
await createConnection({
initiatorUid: friendTwo.uid,
receiverUid: me.uid,
status: "accepted",
});
//WHEN / THEN
expect(await LeaderboardsDal.getCount("time", "60", "english", me.uid)) //
.toEqual(3);
expect(
await LeaderboardsDal.getRank("time", "60", "english", me.uid, true),
) //
.toEqual(
expect.objectContaining({
wpm: 99,
rank: 3,
friendsRank: 2,
name: me.name,
uid: me.uid,
}),
);
});
});
});
/**
 * Builds the leaderboard entry the DAL is expected to produce for a user,
 * read from the user's lbPersonalBests for the given time mode2 ("15"/"60")
 * in english. badgeId/isPremium/friendsRank are only set by tests that
 * exercise those features.
 */
function expectedLbEntry(
  time: string,
  { rank, user, badgeId, isPremium, friendsRank }: ExpectedLbEntry,
) {
  // @ts-expect-error
  const lbBest: PersonalBest =
    // @ts-expect-error
    user.lbPersonalBests?.time[Number.parseInt(time)].english;
  return {
    rank,
    uid: user.uid,
    name: user.name,
    wpm: lbBest.wpm,
    acc: lbBest.acc,
    timestamp: lbBest.timestamp,
    raw: lbBest.raw,
    consistency: lbBest.consistency,
    discordId: user.discordId,
    discordAvatar: user.discordAvatar,
    badgeId,
    isPremium,
    friendsRank,
  };
}
/**
 * Creates a user via UserDal and then patches the stored document directly.
 * Defaults (timeTyping, discordId, discordAvatar) can be overridden through
 * userProperties; lbPersonalBests is spread last so it always wins.
 * Returns the user document as read back from the database.
 */
async function createUser(
  lbPersonalBests?: LbPersonalBests,
  userProperties?: Partial<UserDal.DBUser>,
): Promise<UserDal.DBUser> {
  const uid = new ObjectId().toHexString();
  await UserDal.addUser("User " + uid, uid + "@example.com", uid);
  await DB.getDb()
    ?.collection<UserDal.DBUser>("users")
    .updateOne(
      { uid },
      {
        $set: {
          timeTyping: 7200,
          discordId: "discord " + uid,
          discordAvatar: "avatar " + uid,
          ...userProperties,
          lbPersonalBests,
        },
      },
    );
  return await UserDal.getUser(uid, "test");
}
/**
 * Assembles an lbPersonalBests object containing only the provided english
 * time-15 and/or time-60 personal bests.
 */
function lbBests(pb15?: PersonalBest, pb60?: PersonalBest): LbPersonalBests {
  const time: LbPersonalBests["time"] = {};
  if (pb15 !== undefined) {
    time["15"] = { english: pb15 };
  }
  if (pb60 !== undefined) {
    time["60"] = { english: pb60 };
  }
  return { time };
}
/**
 * Builds the premium fragment of a user document.
 * A delta of -1 is the lifetime-premium sentinel; any other delta expires
 * that many seconds relative to now.
 */
function premium(expirationDeltaSeconds: number) {
  const expirationTimestamp =
    expirationDeltaSeconds === -1
      ? -1
      : Date.now() + expirationDeltaSeconds * 1000;
  return { premium: { startTimestamp: 0, expirationTimestamp } };
}
// Input for expectedLbEntry: the expected rank, the user document the entry
// was derived from, and optional fields only asserted by specific tests.
type ExpectedLbEntry = {
  rank: number;
  user: UserDal.DBUser;
  badgeId?: number;
  isPremium?: boolean;
  friendsRank?: number;
};

View File

@@ -0,0 +1,482 @@
import { describe, it, expect } from "vitest";
import { ObjectId } from "mongodb";
import * as PresetDal from "../../../src/dal/preset";
describe("PresetDal", () => {
describe("readPreset", () => {
it("should read", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const decoyUid = new ObjectId().toHexString();
const first = await PresetDal.addPreset(uid, {
name: "first",
config: { ads: "sellout" },
});
const second = await PresetDal.addPreset(uid, {
name: "second",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
});
await PresetDal.addPreset(decoyUid, {
name: "unknown",
config: {},
});
//WHEN
const read = await PresetDal.getPresets(uid);
//THEN
expect(read).toHaveLength(2);
expect(read).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(first.presetId),
uid: uid,
name: "first",
config: { ads: "sellout" },
}),
expect.objectContaining({
_id: new ObjectId(second.presetId),
uid: uid,
name: "second",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
}),
]),
);
});
});
describe("addPreset", () => {
it("should return error if maximum is reached", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
for (let i = 0; i < 10; i++) {
await PresetDal.addPreset(uid, { name: "test", config: {} });
}
//WHEN / THEN
await expect(() =>
PresetDal.addPreset(uid, { name: "max", config: {} }),
).rejects.toThrow("Too many presets");
});
it("should add preset", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
for (let i = 0; i < 9; i++) {
await PresetDal.addPreset(uid, { name: "test", config: {} });
}
//WHEN
const newPreset = await PresetDal.addPreset(uid, {
name: "new",
config: {
ads: "sellout",
},
});
//THEN
const read = await PresetDal.getPresets(uid);
expect(read).toHaveLength(10);
expect(read).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(newPreset.presetId),
uid: uid,
name: "new",
config: { ads: "sellout" },
}),
]),
);
});
});
describe("editPreset", () => {
it("should not fail if preset is unknown", async () => {
const uid = new ObjectId().toHexString();
await PresetDal.editPreset(uid, {
_id: new ObjectId().toHexString(),
name: "new",
config: {},
});
});
it("should edit", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const decoyUid = new ObjectId().toHexString();
const first = (
await PresetDal.addPreset(uid, {
name: "first",
config: { ads: "sellout" },
})
).presetId;
const second = (
await PresetDal.addPreset(uid, {
name: "second",
config: {
ads: "result",
},
})
).presetId;
const decoy = (
await PresetDal.addPreset(decoyUid, {
name: "unknown",
config: { ads: "result" },
})
).presetId;
//WHEN
await PresetDal.editPreset(uid, {
_id: first,
name: "newName",
config: { ads: "off" },
});
//THEN
const read = await PresetDal.getPresets(uid);
expect(read).toHaveLength(2);
expect(read).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(first),
uid: uid,
name: "newName",
config: { ads: "off" },
}),
expect.objectContaining({
_id: new ObjectId(second),
uid: uid,
name: "second",
config: { ads: "result" },
}),
]),
);
expect(await PresetDal.getPresets(decoyUid)).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(decoy),
uid: decoyUid,
name: "unknown",
config: { ads: "result" },
}),
]),
);
});
it("should edit with name only - full preset", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = (
await PresetDal.addPreset(uid, {
name: "first",
config: { ads: "sellout" },
})
).presetId;
//WHEN empty
await PresetDal.editPreset(uid, {
_id: first,
name: "newName",
});
expect(await PresetDal.getPresets(uid)).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(first),
uid: uid,
name: "newName",
config: { ads: "sellout" },
}),
]),
);
});
it("should edit with name only - partial preset", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = (
await PresetDal.addPreset(uid, {
name: "first",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
})
).presetId;
//WHEN empty
await PresetDal.editPreset(uid, {
_id: first,
name: "newName",
});
expect(await PresetDal.getPresets(uid)).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(first),
uid: uid,
name: "newName",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
}),
]),
);
});
it("should not edit present not matching uid", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const decoyUid = new ObjectId().toHexString();
const first = (
await PresetDal.addPreset(uid, {
name: "first",
config: { ads: "sellout" },
})
).presetId;
//WHEN
await PresetDal.editPreset(decoyUid, {
_id: first,
name: "newName",
config: { ads: "off" },
});
//THEN
const read = await PresetDal.getPresets(uid);
expect(read).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(first),
uid: uid,
name: "first",
config: { ads: "sellout" },
}),
]),
);
});
it("should edit when partial is edited to full", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = (
await PresetDal.addPreset(uid, {
name: "first",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
})
).presetId;
//WHEN
await PresetDal.editPreset(uid, {
_id: first,
name: "newName",
settingGroups: null,
config: { ads: "off" },
});
//THEN
expect(await PresetDal.getPresets(uid)).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(first),
uid: uid,
name: "newName",
config: { ads: "off" },
settingGroups: null,
}),
]),
);
});
it("should edit when full is edited to partial", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const first = (
await PresetDal.addPreset(uid, {
name: "first",
config: {
ads: "off",
},
})
).presetId;
//WHEN
await PresetDal.editPreset(uid, {
_id: first,
name: "newName",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
});
//THEN
expect(await PresetDal.getPresets(uid)).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(first),
uid: uid,
name: "newName",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
}),
]),
);
});
});
describe("removePreset", () => {
it("should fail if preset is unknown", async () => {
const uid = new ObjectId().toHexString();
await expect(() =>
PresetDal.removePreset(uid, new ObjectId().toHexString()),
).rejects.toThrow("Preset not found");
});
it("should remove", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const decoyUid = new ObjectId().toHexString();
const first = (
await PresetDal.addPreset(uid, { name: "first", config: {} })
).presetId;
const second = (
await PresetDal.addPreset(uid, {
name: "second",
config: { ads: "result" },
})
).presetId;
const decoy = (
await PresetDal.addPreset(decoyUid, {
name: "unknown",
config: { ads: "result" },
})
).presetId;
//WHEN
await PresetDal.removePreset(uid, first);
//THEN
const read = await PresetDal.getPresets(uid);
expect(read).toHaveLength(1);
expect(read).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(second),
uid: uid,
name: "second",
config: { ads: "result" },
}),
]),
);
expect(await PresetDal.getPresets(decoyUid)).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(decoy),
uid: decoyUid,
name: "unknown",
config: { ads: "result" },
}),
]),
);
});
it("should not remove present not matching uid", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const decoyUid = new ObjectId().toHexString();
const first = (
await PresetDal.addPreset(uid, {
name: "first",
config: { ads: "sellout" },
})
).presetId;
//WHEN
await expect(() =>
PresetDal.removePreset(decoyUid, first),
).rejects.toThrow("Preset not found");
//THEN
const read = await PresetDal.getPresets(uid);
expect(read).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(first),
uid: uid,
name: "first",
config: { ads: "sellout" },
}),
]),
);
});
});
describe("deleteAllPresets", () => {
it("should not fail if preset is unknown", async () => {
const uid = new ObjectId().toHexString();
await PresetDal.deleteAllPresets(uid);
});
it("should delete all", async () => {
//GIVEN
const uid = new ObjectId().toHexString();
const decoyUid = new ObjectId().toHexString();
await PresetDal.addPreset(uid, { name: "first", config: {} });
await PresetDal.addPreset(uid, {
name: "second",
config: { ads: "result" },
});
const decoy = (
await PresetDal.addPreset(decoyUid, {
name: "unknown",
config: { ads: "result" },
})
).presetId;
//WHEN
await PresetDal.deleteAllPresets(uid);
//THEN
const read = await PresetDal.getPresets(uid);
expect(read).toHaveLength(0);
expect(await PresetDal.getPresets(decoyUid)).toEqual(
expect.arrayContaining([
expect.objectContaining({
_id: new ObjectId(decoy),
uid: decoyUid,
name: "unknown",
config: { ads: "result" },
}),
]),
);
});
});
});

View File

@@ -0,0 +1,26 @@
import { describe, it, expect } from "vitest";
import * as PublicDAL from "../../../src/dal/public";
describe("PublicDAL", function () {
it("should be able to update stats", async function () {
// checks it doesn't throw an error. the actual values are checked in another test.
await PublicDAL.updateStats(1, 15);
});
it("should be able to get typing stats", async function () {
const typingStats = await PublicDAL.getTypingStats();
expect(typingStats).toHaveProperty("testsCompleted");
expect(typingStats).toHaveProperty("testsStarted");
expect(typingStats).toHaveProperty("timeTyping");
});
it("should increment stats on update", async function () {
// checks that both functions are working on the same data in mongo
const priorStats = await PublicDAL.getTypingStats();
await PublicDAL.updateStats(1, 60);
const afterStats = await PublicDAL.getTypingStats();
expect(afterStats.testsCompleted).toBe(priorStats.testsCompleted + 1);
expect(afterStats.testsStarted).toBe(priorStats.testsStarted + 2);
expect(afterStats.timeTyping).toBe(priorStats.timeTyping + 60);
});
});

View File

@@ -0,0 +1,190 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import * as ResultDal from "../../../src/dal/result";
import { ObjectId } from "mongodb";
import * as UserDal from "../../../src/dal/user";
import { DBResult } from "../../../src/utils/result";
import * as ResultUtils from "../../../src/utils/result";
let uid: string;
const timestamp = Date.now() - 60000;
/**
 * Inserts `count` results for the given uid; every result shares the
 * module-level `timestamp` and wpm/rawWpm equal to its loop index unless
 * overridden via `modify`.
 * Side effect: mocks UserDal.getUser to return a minimal dummy user,
 * because ResultDal.addResult looks the user up.
 */
async function createDummyData(
  uid: string,
  count: number,
  modify?: Partial<DBResult>,
): Promise<void> {
  const dummyUser: UserDal.DBUser = {
    _id: new ObjectId(),
    uid,
    addedAt: 0,
    email: "test@example.com",
    name: "Bob",
    personalBests: {
      time: {},
      words: {},
      quote: {},
      custom: {},
      zen: {},
    },
  };
  vi.spyOn(UserDal, "getUser").mockResolvedValue(dummyUser);
  for (let i = 0; i < count; i++) {
    await ResultDal.addResult(uid, {
      ...{
        _id: new ObjectId(),
        wpm: i,
        rawWpm: i,
        charStats: [0, 0, 0, 0],
        acc: 0,
        mode: "time",
        mode2: "10" as never,
        quoteLength: 1,
        timestamp,
        restartCount: 0,
        incompleteTestSeconds: 0,
        incompleteTests: [],
        testDuration: 10,
        afkDuration: 0,
        tags: [],
        consistency: 100,
        keyConsistency: 100,
        chartData: { wpm: [], burst: [], err: [] },
        uid,
        keySpacingStats: { average: 0, sd: 0 },
        keyDurationStats: { average: 0, sd: 0 },
        difficulty: "normal",
        language: "english",
        isPb: false,
        name: "Test",
        funbox: ["58008", "read_ahead"],
      },
      ...modify,
    });
  }
}
describe("ResultDal", () => {
const replaceLegacyValuesMock = vi.spyOn(ResultUtils, "replaceLegacyValues");
beforeEach(() => {
uid = new ObjectId().toHexString();
});
afterEach(async () => {
if (uid) await ResultDal.deleteAll(uid);
replaceLegacyValuesMock.mockClear();
});
describe("getResults", () => {
it("should read lastest 10 results ordered by timestamp", async () => {
//GIVEN
await createDummyData(uid, 10, { timestamp: timestamp - 2000 });
await createDummyData(uid, 20, { tags: ["current"] });
//WHEN
const results = await ResultDal.getResults(uid, { limit: 10 });
//THEN
expect(results).toHaveLength(10);
let last = results[0]?.timestamp as number;
results.forEach((it) => {
expect(it.tags).toContain("current");
expect(it.timestamp).toBeGreaterThanOrEqual(last);
last = it.timestamp;
});
});
it("should read all if not limited", async () => {
//GIVEN
await createDummyData(uid, 10, { timestamp: timestamp - 2000 });
await createDummyData(uid, 20);
//WHEN
const results = await ResultDal.getResults(uid, {});
//THEN
expect(results).toHaveLength(30);
});
it("should read results onOrAfterTimestamp", async () => {
//GIVEN
await createDummyData(uid, 10, { timestamp: timestamp - 2000 });
await createDummyData(uid, 20, { tags: ["current"] });
//WHEN
const results = await ResultDal.getResults(uid, {
onOrAfterTimestamp: timestamp,
});
//THEN
expect(results).toHaveLength(20);
results.forEach((it) => {
expect(it.tags).toContain("current");
});
});
it("should read next 10 results", async () => {
//GIVEN
await createDummyData(uid, 10, {
timestamp: timestamp - 2000,
tags: ["old"],
});
await createDummyData(uid, 20);
//WHEN
const results = await ResultDal.getResults(uid, {
limit: 10,
offset: 20,
});
//THEN
expect(results).toHaveLength(10);
results.forEach((it) => {
expect(it.tags).toContain("old");
});
});
it("should call replaceLegacyValues", async () => {
//GIVEN
await createDummyData(uid, 1);
//WHEN
await ResultDal.getResults(uid);
//THEN
expect(replaceLegacyValuesMock).toHaveBeenCalled();
});
});
describe("getResult", () => {
it("should call replaceLegacyValues", async () => {
//GIVEN
await createDummyData(uid, 1);
const resultId = (await ResultDal.getLastResult(uid))._id.toHexString();
//WHEN
await ResultDal.getResult(uid, resultId);
//THEN
expect(replaceLegacyValuesMock).toHaveBeenCalled();
});
});
describe("getLastResult", () => {
it("should call replaceLegacyValues", async () => {
//GIVEN
await createDummyData(uid, 1);
//WHEN
await ResultDal.getLastResult(uid);
//THEN
expect(replaceLegacyValuesMock).toHaveBeenCalled();
});
});
describe("getResultByTimestamp", () => {
it("should call replaceLegacyValues", async () => {
//GIVEN
await createDummyData(uid, 1);
//WHEN
await ResultDal.getResultByTimestamp(uid, timestamp);
//THEN
expect(replaceLegacyValuesMock).toHaveBeenCalled();
});
});
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,51 @@
import { GenericContainer, StartedTestContainer, Wait } from "testcontainers";
let startedMongoContainer: StartedTestContainer | undefined;
let startedRedisContainer: StartedTestContainer | undefined;
/**
 * vitest globalSetup hook: starts MongoDB and Redis testcontainers and
 * publishes their connection URLs via TEST_DB_URL / REDIS_URI so the test
 * suites can connect.
 */
export async function setup(): Promise<void> {
  // pin timezone — presumably so date-based assertions are stable; confirm
  process.env.TZ = "UTC";
  //use testcontainer to start mongodb
  console.log("\x1b[36mMongoDB starting...\x1b[0m");
  const mongoContainer = new GenericContainer("mongo:5.0.13")
    .withExposedPorts(27017)
    .withWaitStrategy(Wait.forListeningPorts());
  startedMongoContainer = await mongoContainer.start();
  const mongoUrl = `mongodb://${startedMongoContainer?.getHost()}:${startedMongoContainer?.getMappedPort(
    27017,
  )}`;
  process.env["TEST_DB_URL"] = mongoUrl;
  console.log(`\x1b[32mMongoDB is running on ${mongoUrl}\x1b[0m`);
  //use testcontainer to start redis
  console.log("\x1b[36mRedis starting...\x1b[0m");
  const redisContainer = new GenericContainer("redis:6.2.6")
    .withExposedPorts(6379)
    .withWaitStrategy(Wait.forLogMessage("Ready to accept connections"));
  startedRedisContainer = await redisContainer.start();
  const redisUrl = `redis://${startedRedisContainer.getHost()}:${startedRedisContainer.getMappedPort(
    6379,
  )}`;
  process.env["REDIS_URI"] = redisUrl;
  console.log(`\x1b[32mRedis is running on ${redisUrl}\x1b[0m`);
}
/** Stops both test containers; safe to call when they never started. */
async function stopContainers(): Promise<void> {
  console.log("\x1b[36mMongoDB stopping...\x1b[0m");
  await startedMongoContainer?.stop();
  console.log("\x1b[36mRedis stopping...\x1b[0m");
  await startedRedisContainer?.stop();
  console.log(`\x1b[32mContainers stopped.\x1b[0m`);
}

// vitest globalSetup teardown hook.
export async function teardown(): Promise<void> {
  await stopContainers();
}

// Best-effort cleanup when the run is interrupted.
// NOTE(review): the handlers do not call process.exit after the async
// cleanup — confirm the process still terminates as intended.
process.on("SIGTERM", stopContainers);
process.on("SIGINT", stopContainers);

View File

@@ -0,0 +1,11 @@
import { getConnection, connect } from "../../src/init/redis";
// Connects the shared redis client; call from beforeAll in suites using redis.
export async function redisSetup(): Promise<void> {
  await connect();
}
/**
 * Deletes every redis key starting with the given prefix.
 * Fixes: the old `Promise.all(keys?.map(...))` could pass undefined to
 * Promise.all (which throws), and issued one DEL round trip per key; a
 * single multi-key DEL does the same work, guarded against an empty list
 * (DEL with zero keys is an error).
 */
export async function cleanupKeys(prefix: string): Promise<void> {
  // oxlint-disable-next-line no-non-null-assertion
  const connection = getConnection()!;
  const keys = await connection.keys(`${prefix}*`);
  if (keys.length > 0) {
    await connection.del(...keys);
  }
}

View File

@@ -0,0 +1,271 @@
import { describe, it, expect, beforeAll, afterEach } from "vitest";
import * as WeeklyXpLeaderboard from "../../../src/services/weekly-xp-leaderboard";
import { Configuration } from "@monkeytype/schemas/configuration";
import { ObjectId } from "mongodb";
import { RedisXpLeaderboardEntry } from "@monkeytype/schemas/leaderboards";
import { cleanupKeys, redisSetup } from "../redis";
const leaderboardsConfig: Configuration["leaderboards"]["weeklyXp"] = {
enabled: true,
expirationTimeInDays: 7,
xpRewardBrackets: [],
};
describe("Weekly XP Leaderboards", () => {
beforeAll(async () => {
await redisSetup();
});
afterEach(async () => {
await cleanupKeys(WeeklyXpLeaderboard.__testing.namespace);
});
describe("get", () => {
it("should get if enabled", () => {
expect(WeeklyXpLeaderboard.get(leaderboardsConfig)).toBeInstanceOf(
WeeklyXpLeaderboard.WeeklyXpLeaderboard,
);
});
it("should return null if disabled", () => {
expect(WeeklyXpLeaderboard.get({ enabled: false } as any)).toBeNull();
});
});
describe("WeeklyXpLeaderboard class", () => {
// oxlint-disable-next-line no-non-null-assertion
const lb = WeeklyXpLeaderboard.get(leaderboardsConfig)!;
describe("addResult", () => {
it("adds results for user", async () => {
//GIVEN
const user1 = await givenResult(100, { timeTypedSeconds: 5 });
await givenResult(50, { ...user1, timeTypedSeconds: 5 });
const user2 = await givenResult(100, {
isPremium: true,
timeTypedSeconds: 7,
});
//WHEN
const results = await lb.getResults(0, 10, leaderboardsConfig, true);
//THEN
expect(results).toEqual({
count: 2,
entries: [
{
...user1,
rank: 1,
timeTypedSeconds: 10,
totalXp: 150,
isPremium: false,
},
{
...user2,
rank: 2,
timeTypedSeconds: 7,
totalXp: 100,
isPremium: true,
},
],
});
});
});
describe("getResults", () => {
it("gets results", async () => {
//GIVEN
const user1 = await givenResult(150);
const user2 = await givenResult(100);
//WHEN
const results = await lb.getResults(0, 10, leaderboardsConfig, true);
//THEN
expect(results).toEqual({
count: 2,
entries: [
{ rank: 1, totalXp: 150, ...user1 },
{ rank: 2, totalXp: 100, ...user2 },
],
});
});
it("gets results for page", async () => {
//GIVEN
const _user1 = await givenResult(100);
const _user2 = await givenResult(75);
const user3 = await givenResult(50);
const user4 = await givenResult(25);
//WHEN
const results = await lb.getResults(1, 2, leaderboardsConfig, true);
//THEN
expect(results).toEqual({
count: 4,
entries: [
{ rank: 3, totalXp: 50, ...user3 },
{ rank: 4, totalXp: 25, ...user4 },
],
});
});
it("gets results without premium", async () => {
//GIVEN
const user1 = await givenResult(150, { isPremium: true });
const user2 = await givenResult(100);
//WHEN
const results = await lb.getResults(0, 10, leaderboardsConfig, false);
//THEN
expect(results).toEqual({
count: 2,
entries: [
{ rank: 1, totalXp: 150, ...user1, isPremium: undefined },
{ rank: 2, totalXp: 100, ...user2, isPremium: undefined },
],
});
});
it("gets results for friends only", async () => {
//GIVEN
const _user1 = await givenResult(100);
const user2 = await givenResult(75);
const _user3 = await givenResult(50);
const user4 = await givenResult(25);
//WHEN
const results = await lb.getResults(0, 5, leaderboardsConfig, true, [
user2.uid,
user4.uid,
new ObjectId().toHexString(),
]);
//THEN
expect(results).toEqual({
count: 2,
entries: [
{ rank: 2, friendsRank: 1, totalXp: 75, ...user2 },
{ rank: 4, friendsRank: 2, totalXp: 25, ...user4 },
],
});
});
it("gets results for friends only with page", async () => {
//GIVEN
const user1 = await givenResult(100);
const user2 = await givenResult(75);
const _user3 = await givenResult(50);
const user4 = await givenResult(25);
const _user5 = await givenResult(5);
//WHEN
const results = await lb.getResults(1, 2, leaderboardsConfig, true, [
user1.uid,
user2.uid,
user4.uid,
new ObjectId().toHexString(),
]);
//THEN
expect(results).toEqual({
count: 3,
entries: [{ rank: 4, friendsRank: 3, totalXp: 25, ...user4 }],
});
});
it("should return empty list if no friends", async () => {
//GIVEN
//WHEN
const results = await lb.getResults(0, 5, leaderboardsConfig, true, []);
//THEN
expect(results).toEqual({
count: 0,
entries: [],
});
});
});
describe("getRank", () => {
it("gets rank", async () => {
//GIVEN
const user1 = await givenResult(100);
const _user2 = await givenResult(150);
//WHEN
const rank = await lb.getRank(user1.uid, leaderboardsConfig);
//THEN
expect(rank).toEqual({ rank: 2, totalXp: 100, ...user1 });
});
it("should return null for unknown user", async () => {
expect(await lb.getRank("decoy", leaderboardsConfig)).toBeNull();
expect(
await lb.getRank("decoy", leaderboardsConfig, [
"unknown",
"unknown2",
]),
).toBeNull();
});
it("gets rank for friends", async () => {
//GIVEN
const user1 = await givenResult(50);
const user2 = await givenResult(60);
const _user3 = await givenResult(70);
const friends = [user1.uid, user2.uid, "decoy"];
//WHEN / THEN
expect(
await lb.getRank(user2.uid, leaderboardsConfig, friends),
).toEqual({ rank: 2, friendsRank: 1, totalXp: 60, ...user2 });
expect(
await lb.getRank(user1.uid, leaderboardsConfig, friends),
).toEqual({ rank: 3, friendsRank: 2, totalXp: 50, ...user1 });
});
});
it("purgeUserFromDailyLeaderboards", async () => {
//GIVEN
const cheater = await givenResult(50);
const validUser = await givenResult(1000);
//WHEN
await WeeklyXpLeaderboard.purgeUserFromXpLeaderboards(
cheater.uid,
leaderboardsConfig,
);
//THEN
expect(await lb.getRank(cheater.uid, leaderboardsConfig)).toBeNull();
expect(await lb.getResults(0, 50, leaderboardsConfig, true)).toEqual({
count: 1,
entries: [{ rank: 1, totalXp: 1000, ...validUser }],
});
});
/**
 * Adds a result worth `xpGained` xp to the leaderboard under test and
 * returns the redis entry that was written (defaults merged with `entry`
 * overrides), so tests can compare it against getResults/getRank output.
 */
async function givenResult(
  xpGained: number,
  entry?: Partial<RedisXpLeaderboardEntry>,
): Promise<RedisXpLeaderboardEntry> {
  const uid = new ObjectId().toHexString();
  const result: RedisXpLeaderboardEntry = {
    uid,
    name: `User ${uid}`,
    lastActivityTimestamp: Date.now(),
    timeTypedSeconds: 42,
    badgeId: 2,
    discordAvatar: `${uid}Avatar`,
    discordId: `${uid}DiscordId`,
    isPremium: false,
    ...entry,
  };
  await lb.addResult(leaderboardsConfig, { xpGained, entry: result });
  return result;
}
});
});

View File

@@ -0,0 +1,46 @@
import { afterAll, beforeAll, afterEach, vi } from "vitest";
import { Collection, Db, MongoClient, WithId } from "mongodb";
import { setupCommonMocks } from "../setup-common-mocks";
import { getConnection } from "../../src/init/redis";
process.env["MODE"] = "dev";
let db: Db;
let client: MongoClient;
beforeAll(async () => {
  // Connect to the MongoDB container started by the global setup.
  client = new MongoClient(process.env["TEST_DB_URL"] as string);
  await client.connect();
  db = client.db();
  // Replace the app's db module with the container-backed connection.
  // NOTE(review): vi.mock is normally hoisted and documented for module top
  // level only; inside beforeAll it may not intercept modules imported
  // earlier — confirm this registers before the code under test loads.
  vi.mock("../../src/init/db", () => ({
    __esModule: true,
    getDb: (): Db => db,
    collection: <T>(name: string): Collection<WithId<T>> =>
      db.collection<WithId<T>>(name),
    close: () => {
      //
    },
  }));
  setupCommonMocks();
  //we compare the time in mongodb to calculate premium status, so we have to use real time here
  vi.useRealTimers();
});

afterEach(async () => {
  //nothing
});

afterAll(async () => {
  await client?.close();
  // Drop the references so any stray access after teardown fails fast.
  // @ts-ignore
  db = undefined;
  //@ts-ignore
  client = undefined;
  await getConnection()?.quit();
  vi.resetAllMocks();
});

View File

@@ -0,0 +1,404 @@
import { describe, it, expect, beforeAll, afterEach } from "vitest";
import { Mode, Mode2 } from "@monkeytype/schemas/shared";
import * as DailyLeaderboards from "../../../src/utils/daily-leaderboards";
import { cleanupKeys, redisSetup } from "../redis";
import { Language } from "@monkeytype/schemas/languages";
import { RedisDailyLeaderboardEntry } from "@monkeytype/schemas/leaderboards";
import { ObjectId } from "mongodb";
import { Configuration } from "@monkeytype/schemas/configuration";
// Test configuration: only english/spanish "time 15/60" and french "words"
// count as valid daily-leaderboard modes; the board holds at most 10 results.
const dailyLeaderboardsConfig: Configuration["dailyLeaderboards"] = {
  enabled: true,
  maxResults: 10,
  leaderboardExpirationTimeInDays: 1,
  // language/mode/mode2 are regex patterns matched against the result.
  validModeRules: [
    {
      language: "(english|spanish)",
      mode: "time",
      mode2: "(15|60)",
    },
    {
      language: "french",
      mode: "words",
      mode2: "\\d+",
    },
  ],
  topResultsToAnnounce: 3,
  xpRewardBrackets: [],
  scheduleRewardsModeRules: [],
};
describe("Daily Leaderboards", () => {
  // Connect the redis test instance once for the whole suite.
  beforeAll(async () => {
    await redisSetup();
  });
  // Remove all leaderboard keys so each test starts from an empty board.
  afterEach(async () => {
    await cleanupKeys(DailyLeaderboards.__testing.namespace);
  });
  // getDailyLeaderboard returns a leaderboard only for language/mode/mode2
  // combinations matching one of the configured validModeRules.
  describe("should properly handle valid and invalid modes", () => {
    const testCases: {
      language: Language;
      mode: Mode;
      mode2: Mode2<any>;
      expected: boolean;
    }[] = [
      {
        language: "english",
        mode: "time",
        mode2: "60",
        expected: true,
      },
      {
        language: "spanish",
        mode: "time",
        mode2: "15",
        expected: true,
      },
      {
        language: "english",
        mode: "time",
        mode2: "600",
        expected: false,
      },
      {
        language: "spanish",
        mode: "words",
        mode2: "150",
        expected: false,
      },
      {
        language: "french",
        mode: "time",
        mode2: "600",
        expected: false,
      },
      {
        language: "french",
        mode: "words",
        mode2: "100",
        expected: true,
      },
    ];
    it.for(testCases)(
      `language=$language, mode=$mode mode2=$mode2 expect $expected`,
      ({ language, mode, mode2, expected }) => {
        const result = DailyLeaderboards.getDailyLeaderboard(
          language,
          mode,
          mode2 as any,
          dailyLeaderboardsConfig,
        );
        // A truthy result means the mode combination is accepted.
        expect(!!result).toBe(expected);
      },
    );
  });
describe("DailyLeaderboard class", () => {
    // Leaderboard under test; guaranteed non-null because english/time/60
    // matches the first validModeRules entry of dailyLeaderboardsConfig.
    // oxlint-disable-next-line no-non-null-assertion
    const lb = DailyLeaderboards.getDailyLeaderboard(
      "english",
      "time",
      "60",
      dailyLeaderboardsConfig,
    )!;
    // addResult keeps only the best result per user and caps the board size.
    describe("addResult", () => {
      it("adds best result for user", async () => {
        //GIVEN — three results for the same uid; only the best (55) survives
        const uid = new ObjectId().toHexString();
        await givenResult({ uid, wpm: 50 });
        const bestResult = await givenResult({ uid, wpm: 55 });
        await givenResult({ uid, wpm: 53 });
        const user2 = await givenResult({ wpm: 20 });
        //WHEN
        const results = await lb.getResults(
          0,
          5,
          dailyLeaderboardsConfig,
          true,
        );
        //THEN
        expect(results).toEqual({
          count: 2,
          minWpm: 20,
          entries: [
            { rank: 1, ...bestResult },
            { rank: 2, ...user2 },
          ],
        });
      });
      it("limits max amount of results", async () => {
        //GIVEN — fill the board to maxResults with bob at the bottom
        const maxResults = dailyLeaderboardsConfig.maxResults;
        const bob = await givenResult({ wpm: 10 });
        await Promise.all(
          new Array(maxResults - 1)
            .fill(0)
            .map(() => givenResult({ wpm: 20 + Math.random() * 100 })),
        );
        expect(
          await lb.getResults(0, 5, dailyLeaderboardsConfig, true),
        ).toEqual(expect.objectContaining({ count: maxResults }));
        expect(await lb.getRank(bob.uid, dailyLeaderboardsConfig)).toEqual({
          rank: maxResults,
          ...bob,
        });
        //WHEN — a slightly better result pushes bob off the board
        await givenResult({ wpm: 11 });
        //THEN
        //max count is still the same, but bob is no longer on the leaderboard
        expect(
          await lb.getResults(0, 5, dailyLeaderboardsConfig, true),
        ).toEqual(expect.objectContaining({ count: maxResults }));
        expect(await lb.getRank(bob.uid, dailyLeaderboardsConfig)).toBeNull();
      });
    });
    // getResults(page, pageSize, config, premiumEnabled, friends?) returns a
    // ranked slice of the board, optionally filtered to a friends list.
    describe("getResults", () => {
      it("gets result", async () => {
        //GIVEN
        const user1 = await givenResult({ wpm: 50, isPremium: true });
        const user2 = await givenResult({ wpm: 60 });
        const user3 = await givenResult({ wpm: 40 });
        //WHEN
        const results = await lb.getResults(
          0,
          5,
          dailyLeaderboardsConfig,
          true,
        );
        //THEN — ordered by wpm descending, minWpm is the lowest on the board
        expect(results).toEqual({
          count: 3,
          minWpm: 40,
          entries: [
            { rank: 1, ...user2 },
            { rank: 2, ...user1 },
            { rank: 3, ...user3 },
          ],
        });
      });
      it("gets result for page", async () => {
        //GIVEN
        const user4 = await givenResult({ wpm: 45 });
        const _user5 = await givenResult({ wpm: 20 });
        const _user1 = await givenResult({ wpm: 50 });
        const _user2 = await givenResult({ wpm: 60 });
        const user3 = await givenResult({ wpm: 40 });
        //WHEN — page 1 with pageSize 2 → global ranks 3 and 4
        const results = await lb.getResults(
          1,
          2,
          dailyLeaderboardsConfig,
          true,
        );
        //THEN
        expect(results).toEqual({
          count: 5,
          minWpm: 20,
          entries: [
            { rank: 3, ...user4 },
            { rank: 4, ...user3 },
          ],
        });
      });
      it("gets result without premium", async () => {
        //GIVEN
        const user1 = await givenResult({ wpm: 50, isPremium: true });
        const user2 = await givenResult({ wpm: 60 });
        const user3 = await givenResult({ wpm: 40, isPremium: true });
        //WHEN — premiumEnabled=false strips the isPremium flag
        const results = await lb.getResults(
          0,
          5,
          dailyLeaderboardsConfig,
          false,
        );
        //THEN
        expect(results).toEqual({
          count: 3,
          minWpm: 40,
          entries: [
            { rank: 1, ...user2, isPremium: undefined },
            { rank: 2, ...user1, isPremium: undefined },
            { rank: 3, ...user3, isPremium: undefined },
          ],
        });
      });
      it("should get for friends only", async () => {
        //GIVEN
        const _user1 = await givenResult({ wpm: 90 });
        const user2 = await givenResult({ wpm: 80 });
        const _user3 = await givenResult({ wpm: 70 });
        const user4 = await givenResult({ wpm: 60 });
        const _user5 = await givenResult({ wpm: 50 });
        //WHEN — friends list includes one uid not on the board
        const results = await lb.getResults(
          0,
          5,
          dailyLeaderboardsConfig,
          true,
          [user2.uid, user4.uid, new ObjectId().toHexString()],
        );
        //THEN — entries keep their global rank and gain a friendsRank
        expect(results).toEqual({
          count: 2,
          minWpm: 60,
          entries: [
            { rank: 2, friendsRank: 1, ...user2 },
            { rank: 4, friendsRank: 2, ...user4 },
          ],
        });
      });
      it("should get for friends only with page", async () => {
        //GIVEN
        const user1 = await givenResult({ wpm: 105 });
        const user2 = await givenResult({ wpm: 100 });
        const _user3 = await givenResult({ wpm: 95 });
        const user4 = await givenResult({ wpm: 90 });
        const _user5 = await givenResult({ wpm: 70 });
        //WHEN — pagination applies to the friends-filtered list
        const results = await lb.getResults(
          1,
          2,
          dailyLeaderboardsConfig,
          true,
          [user1.uid, user2.uid, user4.uid, new ObjectId().toHexString()],
        );
        //THEN
        expect(results).toEqual({
          count: 3,
          minWpm: 90,
          entries: [{ rank: 4, friendsRank: 3, ...user4 }],
        });
      });
      it("should return empty list if no friends", async () => {
        //GIVEN
        //WHEN — an empty friends list yields an empty board
        const results = await lb.getResults(
          0,
          5,
          dailyLeaderboardsConfig,
          true,
          [],
        );
        //THEN
        expect(results).toEqual({
          count: 0,
          minWpm: 0,
          entries: [],
        });
      });
    });
    // getRank returns the user's entry with its rank (and friendsRank when a
    // friends list is given), or null if the user is not on the board.
    describe("getRank", () => {
      it("gets rank", async () => {
        //GIVEN
        const user1 = await givenResult({ wpm: 50 });
        const user2 = await givenResult({ wpm: 60 });
        //WHEN / THEN
        expect(await lb.getRank(user1.uid, dailyLeaderboardsConfig)).toEqual({
          rank: 2,
          ...user1,
        });
        expect(await lb.getRank(user2.uid, dailyLeaderboardsConfig)).toEqual({
          rank: 1,
          ...user2,
        });
      });
      it("should return null for unknown user", async () => {
        expect(await lb.getRank("decoy", dailyLeaderboardsConfig)).toBeNull();
        expect(
          await lb.getRank("decoy", dailyLeaderboardsConfig, [
            "unknown",
            "unknown2",
          ]),
        ).toBeNull();
      });
      it("gets rank for friends", async () => {
        //GIVEN — friends list may contain uids that are not on the board
        const user1 = await givenResult({ wpm: 50 });
        const user2 = await givenResult({ wpm: 60 });
        const _user3 = await givenResult({ wpm: 70 });
        const friends = [user1.uid, user2.uid, "decoy"];
        //WHEN / THEN
        expect(
          await lb.getRank(user2.uid, dailyLeaderboardsConfig, friends),
        ).toEqual({ rank: 2, friendsRank: 1, ...user2 });
        expect(
          await lb.getRank(user1.uid, dailyLeaderboardsConfig, friends),
        ).toEqual({ rank: 3, friendsRank: 2, ...user1 });
      });
    });
    // Purging removes the user from all daily leaderboards; remaining entries
    // are re-ranked.
    it("purgeUserFromDailyLeaderboards", async () => {
      //GIVEN
      const cheater = await givenResult({ wpm: 50 });
      const user1 = await givenResult({ wpm: 60 });
      const user2 = await givenResult({ wpm: 40 });
      //WHEN
      await DailyLeaderboards.purgeUserFromDailyLeaderboards(
        cheater.uid,
        dailyLeaderboardsConfig,
      );
      //THEN
      expect(await lb.getRank(cheater.uid, dailyLeaderboardsConfig)).toBeNull();
      expect(await lb.getResults(0, 50, dailyLeaderboardsConfig, true)).toEqual(
        {
          count: 2,
          minWpm: 40,
          entries: [
            { rank: 1, ...user1 },
            { rank: 2, ...user2 },
          ],
        },
      );
    });
async function givenResult(
entry?: Partial<RedisDailyLeaderboardEntry>,
): Promise<RedisDailyLeaderboardEntry> {
const uid = new ObjectId().toHexString();
const result = {
acc: 85,
name: `User ${uid}`,
raw: 100,
wpm: 95,
timestamp: Date.now(),
uid: uid,
badgeId: 2,
consistency: 90,
discordAvatar: `${uid}Avatar`,
discordId: `${uid}DiscordId`,
isPremium: false,
...entry,
};
await lb.addResult(result, dailyLeaderboardsConfig);
return result;
}
});
});

View File

@@ -0,0 +1,83 @@
import { expect, vi } from "vitest";
import { Configuration } from "@monkeytype/schemas/configuration";
import { randomBytes } from "crypto";
import { hash } from "bcrypt";
import { ObjectId } from "mongodb";
import { base64UrlEncode } from "../../src/utils/misc";
import * as ApeKeyDal from "../../src/dal/ape-keys";
import { DecodedIdToken } from "firebase-admin/auth";
import * as AuthUtils from "../../src/utils/auth";
/**
 * Mock ape-key authentication for the given user.
 * Generates a random ape key, mocks the DAL lookups to accept it and returns
 * the encoded value to use in the `Authorization: ApeKey <value>` header.
 * @throws if ape keys are not accepted by the given configuration
 */
export async function mockAuthenticateWithApeKey(
  uid: string,
  config: Configuration,
): Promise<string> {
  if (!config.apeKeys.acceptKeys) {
    // Fixed message: the property is `acceptKeys`, not `acceptedKeys`.
    throw Error("config.apeKeys.acceptKeys needs to be set to true");
  }
  const { apeKeyBytes, apeKeySaltRounds } = config.apeKeys;
  const apiKey = randomBytes(apeKeyBytes).toString("base64url");
  // Only the bcrypt hash of the key is stored, mirroring production.
  const saltyHash = await hash(apiKey, apeKeySaltRounds);
  const apeKey: ApeKeyDal.DBApeKey = {
    _id: new ObjectId(),
    name: "bob",
    enabled: true,
    uid,
    hash: saltyHash,
    createdOn: Date.now(),
    modifiedOn: Date.now(),
    lastUsedOn: -1,
    useCount: 0,
  };
  const apeKeyId = new ObjectId().toHexString();
  vi.spyOn(ApeKeyDal, "getApeKey").mockResolvedValue(apeKey);
  vi.spyOn(ApeKeyDal, "updateLastUsedOn").mockResolvedValue();
  // The header value is base64url("<id>.<key>").
  return base64UrlEncode(`${apeKeyId}.${apiKey}`);
}
/**
 * Mock firebase bearer-token authentication for the given user and return a
 * small controller object for manipulating the mock from individual tests.
 */
export function mockBearerAuthentication(uid: string) {
  const defaultToken = {
    uid,
    email: "newuser@mail.com",
    iat: Date.now(),
  } as DecodedIdToken;
  const verifySpy = vi.spyOn(AuthUtils, "verifyIdToken");
  return {
    /**
     * Reset the mock and return a default token. Call this method in the `beforeEach` of all tests.
     */
    beforeEach: (): void => {
      verifySpy.mockClear();
      verifySpy.mockResolvedValue(defaultToken);
    },
    /**
     * Reset the mock results in the authentication to fail.
     */
    noAuth: (): void => {
      verifySpy.mockClear();
    },
    /**
     * verify the authentication has been called
     */
    expectToHaveBeenCalled: (): void => {
      expect(verifySpy).toHaveBeenCalled();
    },
    /**
     * modify the token returned by the mock. This can be used to e.g. return a stale token.
     * @param customize
     */
    modifyToken: (customize: Partial<DecodedIdToken>): void => {
      verifySpy.mockClear();
      verifySpy.mockResolvedValue({
        ...defaultToken,
        ...customize,
      });
    },
  };
}

View File

@@ -0,0 +1,24 @@
import { ObjectId } from "mongodb";
import * as ConnectionsDal from "../../src/dal/connections";
/**
 * Create a connection document in the test database, filling any missing
 * initiator/receiver fields with generated values, then applying `data` as
 * raw overrides on the stored document.
 */
export async function createConnection(
  data: Partial<ConnectionsDal.DBConnection>,
  maxPerUser = 25,
): Promise<ConnectionsDal.DBConnection> {
  const fallbackName = "user" + new ObjectId().toHexString();
  const initiator = {
    uid: data.initiatorUid ?? new ObjectId().toHexString(),
    name: data.initiatorName ?? fallbackName,
  };
  const receiver = {
    uid: data.receiverUid ?? new ObjectId().toHexString(),
    name: data.receiverName ?? fallbackName,
  };
  const created = await ConnectionsDal.create(initiator, receiver, maxPerUser);
  // Write the remaining overrides directly, bypassing DAL validation.
  await ConnectionsDal.__testing
    .getCollection()
    .updateOne({ _id: created._id }, { $set: data });
  return { ...created, ...data };
}

View File

@@ -0,0 +1,17 @@
import request from "supertest";
import app from "../../src/app";
import { ObjectId } from "mongodb";
import { mockBearerAuthentication } from "./auth";
import { beforeEach } from "vitest";
/**
 * Create a supertest wrapper around the app with mocked bearer authentication
 * for a freshly generated uid. Registers a beforeEach that resets the auth
 * mock before every test.
 */
export function setup() {
  const uid = new ObjectId().toHexString();
  const mockAuth = mockBearerAuthentication(uid);
  const mockApp = request(app);
  beforeEach(() => {
    mockAuth.beforeEach();
  });
  return { mockApp, uid, mockAuth };
}

View File

@@ -0,0 +1,21 @@
import { expect } from "vitest";
import MonkeyError from "../../src/utils/error";
import { MatcherResult } from "../vitest";
/**
 * Register the `toMatchMonkeyError` custom matcher, which compares a
 * MonkeyError's status and message against an expected error.
 */
export function enableMonkeyErrorExpects(): void {
  expect.extend({
    toMatchMonkeyError(
      received: MonkeyError,
      expected: MonkeyError,
    ): MatcherResult {
      const statusMatches = received.status === expected.status;
      const messageMatches = received.message === expected.message;
      return {
        pass: statusMatches && messageMatches,
        message: () => "MonkeyError does not match:",
        actual: { status: received.status, message: received.message },
        expected: { status: expected.status, message: expected.message },
      };
    },
  });
}

View File

@@ -0,0 +1,30 @@
import { expect } from "vitest";
import { REQUEST_MULTIPLIER } from "../../src/middlewares/rate-limit";
import { MatcherResult, ExpectedRateLimit } from "../vitest";
import { Test as SuperTest } from "supertest";
/**
 * Register the `toBeRateLimited` custom matcher. It sends the request and
 * compares the rate-limit headers against the expected max/window, allowing
 * up to 2500ms of timing slack on the window.
 */
export function enableRateLimitExpects(): void {
  expect.extend({
    toBeRateLimited: async (
      received: SuperTest,
      expected: ExpectedRateLimit,
    ): Promise<MatcherResult> => {
      const now = Date.now();
      const { headers } = await received.expect(200);
      // The server multiplies limits by REQUEST_MULTIPLIER; divide to recover
      // the configured maximum.
      const max =
        parseInt(headers["x-ratelimit-limit"] as string) / REQUEST_MULTIPLIER;
      // x-ratelimit-reset is a unix timestamp in seconds.
      const windowMs =
        parseInt(headers["x-ratelimit-reset"] as string) * 1000 - now;
      return {
        pass:
          max === expected.max && Math.abs(expected.windowMs - windowMs) < 2500,
        // Fixed typo in the failure message: "then" -> "than".
        message: () =>
          "Rate limit max not matching or windowMs is off by more than 2500ms",
        actual: { max, windowMs },
        expected: expected,
      };
    },
  });
}

View File

@@ -0,0 +1,45 @@
import * as DB from "../../src/init/db";
import * as UserDAL from "../../src/dal/user";
import { ObjectId } from "mongodb";
import { PersonalBest } from "@monkeytype/schemas/shared";
/**
 * Create a user in the test database with a generated uid and apply the given
 * overrides directly on the stored document.
 */
export async function createUser(
  user?: Partial<UserDAL.DBUser>,
): Promise<UserDAL.DBUser> {
  const uid = new ObjectId().toHexString();
  const name = "user" + uid;
  await UserDAL.addUser(name, uid + "@example.com", uid);
  await DB.collection("users").updateOne({ uid }, { $set: { ...user } });
  return await UserDAL.getUser(uid, "test");
}
export async function createUserWithoutMigration(
user?: Partial<UserDAL.DBUser>,
): Promise<UserDAL.DBUser> {
const uid = new ObjectId().toHexString();
await UserDAL.addUser("user" + uid, uid + "@example.com", uid);
await DB.collection("users").updateOne({ uid }, { $set: { ...user } });
await DB.collection("users").updateOne(
{ uid },
{ $unset: { testActivity: "" } },
);
return await UserDAL.getUser(uid, "test");
}
/**
 * Build a minimal english/normal-difficulty personal best for tests.
 * `raw` is always one wpm above `wpm`.
 */
export function pb(
  wpm: number,
  acc: number = 90,
  timestamp: number = 1,
): PersonalBest {
  return {
    wpm,
    raw: wpm + 1,
    acc,
    timestamp,
    consistency: 100,
    difficulty: "normal",
    lazyMode: false,
    language: "english",
    punctuation: false,
  };
}

View File

@@ -0,0 +1,580 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import { ObjectId } from "mongodb";
import * as Configuration from "../../../src/init/configuration";
import * as AdminUuidDal from "../../../src/dal/admin-uids";
import * as UserDal from "../../../src/dal/user";
import * as ReportDal from "../../../src/dal/report";
import * as LogsDal from "../../../src/dal/logs";
import GeorgeQueue from "../../../src/queues/george-queue";
import * as AuthUtil from "../../../src/utils/auth";
import { enableRateLimitExpects } from "../../__testData__/rate-limit";
import Test from "supertest/lib/test";
// Shared supertest app with mocked bearer authentication for a random uid.
const { mockApp, uid } = setup();
const configuration = Configuration.getCachedConfiguration();
enableRateLimitExpects();
describe("AdminController", () => {
  const isAdminMock = vi.spyOn(AdminUuidDal, "isAdmin");
  const logsAddImportantLog = vi.spyOn(LogsDal, "addImportantLog");
  // Default state for every test: admin endpoints enabled and the caller is
  // an admin; individual tests override these as needed.
  beforeEach(async () => {
    isAdminMock.mockClear();
    await enableAdminEndpoints(true);
    isAdminMock.mockResolvedValue(true);
    logsAddImportantLog.mockClear().mockResolvedValue();
  });
  // GET /admin — health check that only verifies admin permission.
  describe("check for admin", () => {
    it("should succeed if user is admin", async () => {
      //GIVEN
      //WHEN
      const { body } = await mockApp
        .get("/admin")
        .set("Authorization", `Bearer ${uid}`)
        .expect(200);
      //THEN
      expect(body).toEqual({
        message: "OK",
        data: null,
      });
      expect(isAdminMock).toHaveBeenCalledWith(uid);
    });
    it("should fail if user is no admin", async () => {
      await expectFailForNonAdmin(
        mockApp.get("/admin").set("Authorization", `Bearer ${uid}`),
      );
    });
    it("should fail if admin endpoints are disabled", async () => {
      await expectFailForDisabledEndpoint(
        mockApp.get("/admin").set("Authorization", `Bearer ${uid}`),
      );
    });
    it("should be rate limited", async () => {
      await expect(
        mockApp.get("/admin").set("Authorization", `Bearer ${uid}`),
      ).toBeRateLimited({ max: 1, windowMs: 5000 });
    });
  });
  // POST /admin/toggleBan — flips the banned flag and notifies george (the
  // discord bot) when the user has a discordId.
  describe("toggle ban", () => {
    const userBannedMock = vi.spyOn(UserDal, "setBanned");
    const georgeBannedMock = vi.spyOn(GeorgeQueue, "userBanned");
    const getUserMock = vi.spyOn(UserDal, "getPartialUser");
    beforeEach(() => {
      [userBannedMock, georgeBannedMock, getUserMock].forEach((it) =>
        it.mockClear(),
      );
      userBannedMock.mockResolvedValue();
    });
    it("should ban user with discordId", async () => {
      //GIVEN
      const victimUid = new ObjectId().toHexString();
      getUserMock.mockResolvedValue({
        banned: false,
        discordId: "discordId",
      } as any);
      //WHEN
      const { body } = await mockApp
        .post("/admin/toggleBan")
        .send({ uid: victimUid })
        .set("Authorization", `Bearer ${uid}`)
        .expect(200);
      //THEN
      expect(body).toEqual({
        message: "Ban toggled",
        data: { banned: true },
      });
      expect(getUserMock).toHaveBeenCalledWith(victimUid, "toggle ban", [
        "banned",
        "discordId",
      ]);
      expect(userBannedMock).toHaveBeenCalledWith(victimUid, true);
      expect(georgeBannedMock).toHaveBeenCalledWith("discordId", true);
    });
    it("should unban user without discordId", async () => {
      //GIVEN
      const victimUid = new ObjectId().toHexString();
      getUserMock.mockResolvedValue({
        banned: true,
      } as any);
      //WHEN
      const { body } = await mockApp
        .post("/admin/toggleBan")
        .send({ uid: victimUid })
        .set("Authorization", `Bearer ${uid}`)
        .expect(200);
      //THEN — no discordId means george must not be notified
      expect(body).toEqual({
        message: "Ban toggled",
        data: { banned: false },
      });
      expect(getUserMock).toHaveBeenCalledWith(victimUid, "toggle ban", [
        "banned",
        "discordId",
      ]);
      expect(userBannedMock).toHaveBeenCalledWith(victimUid, false);
      expect(georgeBannedMock).not.toHaveBeenCalled();
    });
    it("should fail without mandatory properties", async () => {
      //GIVEN
      //WHEN
      const { body } = await mockApp
        .post("/admin/toggleBan")
        .send({})
        .set("Authorization", `Bearer ${uid}`)
        .expect(422);
      //THEN
      expect(body).toEqual({
        message: "Invalid request data schema",
        validationErrors: ['"uid" Required'],
      });
    });
    it("should fail with unknown properties", async () => {
      //GIVEN
      //WHEN
      const { body } = await mockApp
        .post("/admin/toggleBan")
        .send({ uid: new ObjectId().toHexString(), extra: "value" })
        .set("Authorization", `Bearer ${uid}`)
        .expect(422);
      //THEN
      expect(body).toEqual({
        message: "Invalid request data schema",
        validationErrors: ["Unrecognized key(s) in object: 'extra'"],
      });
    });
    it("should fail if user is no admin", async () => {
      await expectFailForNonAdmin(
        mockApp
          .post("/admin/toggleBan")
          .send({ uid: new ObjectId().toHexString() })
          .set("Authorization", `Bearer ${uid}`),
      );
    });
    it("should fail if admin endpoints are disabled", async () => {
      //GIVEN
      await expectFailForDisabledEndpoint(
        mockApp
          .post("/admin/toggleBan")
          .send({ uid: new ObjectId().toHexString() })
          .set("Authorization", `Bearer ${uid}`),
      );
    });
    it("should be rate limited", async () => {
      //GIVEN
      const victimUid = new ObjectId().toHexString();
      getUserMock.mockResolvedValue({
        banned: false,
        discordId: "discordId",
      } as any);
      //WHEN
      await expect(
        mockApp
          .post("/admin/toggleBan")
          .send({ uid: victimUid })
          .set("Authorization", `Bearer ${uid}`),
      ).toBeRateLimited({ max: 1, windowMs: 5000 });
    });
  });
  // POST /admin/clearStreakHourOffset — clears the streak hour offset for a
  // given user.
  describe("clear streak hour offset", () => {
    const clearStreakHourOffset = vi.spyOn(UserDal, "clearStreakHourOffset");
    beforeEach(() => {
      clearStreakHourOffset.mockClear();
      clearStreakHourOffset.mockResolvedValue();
    });
    it("should clear streak hour offset for user", async () => {
      //GIVEN
      const victimUid = new ObjectId().toHexString();
      //WHEN
      const { body } = await mockApp
        .post("/admin/clearStreakHourOffset")
        .send({ uid: victimUid })
        .set("Authorization", `Bearer ${uid}`)
        .expect(200);
      //THEN
      expect(body).toEqual({
        message: "Streak hour offset cleared",
        data: null,
      });
      expect(clearStreakHourOffset).toHaveBeenCalledWith(victimUid);
    });
    it("should fail without mandatory properties", async () => {
      //GIVEN
      //WHEN
      const { body } = await mockApp
        .post("/admin/clearStreakHourOffset")
        .send({})
        .set("Authorization", `Bearer ${uid}`)
        .expect(422);
      //THEN
      expect(body).toEqual({
        message: "Invalid request data schema",
        validationErrors: ['"uid" Required'],
      });
    });
    it("should fail with unknown properties", async () => {
      //GIVEN
      //WHEN
      const { body } = await mockApp
        .post("/admin/clearStreakHourOffset")
        .send({ uid: new ObjectId().toHexString(), extra: "value" })
        .set("Authorization", `Bearer ${uid}`)
        .expect(422);
      //THEN
      expect(body).toEqual({
        message: "Invalid request data schema",
        validationErrors: ["Unrecognized key(s) in object: 'extra'"],
      });
    });
    it("should fail if user is no admin", async () => {
      await expectFailForNonAdmin(
        mockApp
          .post("/admin/clearStreakHourOffset")
          .send({ uid: new ObjectId().toHexString() })
          .set("Authorization", `Bearer ${uid}`),
      );
    });
    it("should fail if admin endpoints are disabled", async () => {
      //GIVEN
      await expectFailForDisabledEndpoint(
        mockApp
          .post("/admin/clearStreakHourOffset")
          .send({ uid: new ObjectId().toHexString() })
          .set("Authorization", `Bearer ${uid}`),
      );
    });
    it("should be rate limited", async () => {
      //GIVEN
      const victimUid = new ObjectId().toHexString();
      //WHEN
      await expect(
        mockApp
          .post("/admin/clearStreakHourOffset")
          .send({ uid: victimUid })
          .set("Authorization", `Bearer ${uid}`),
      ).toBeRateLimited({ max: 1, windowMs: 5000 });
    });
  });
describe("accept reports", () => {
const getReportsMock = vi.spyOn(ReportDal, "getReports");
const deleteReportsMock = vi.spyOn(ReportDal, "deleteReports");
const addToInboxMock = vi.spyOn(UserDal, "addToInbox");
beforeEach(() => {
[getReportsMock, deleteReportsMock, addToInboxMock].forEach((it) =>
it.mockClear(),
);
deleteReportsMock.mockResolvedValue();
});
it("should accept reports", async () => {
//GIVEN
const reportOne = {
id: "1",
reason: "one",
} as any as ReportDal.DBReport;
const reportTwo = {
id: "2",
reason: "two",
} as any as ReportDal.DBReport;
getReportsMock.mockResolvedValue([reportOne, reportTwo]);
//WHEN
const { body } = await mockApp
.post("/admin/report/accept")
.send({
reports: [{ reportId: reportOne.id }, { reportId: reportTwo.id }],
})
.set("Authorization", `Bearer ${uid}`)
.expect(200);
expect(body).toEqual({
message: "Reports removed and users notified.",
data: null,
});
expect(addToInboxMock).toHaveBeenCalledTimes(2);
expect(deleteReportsMock).toHaveBeenCalledWith(["1", "2"]);
});
it("should fail wihtout mandatory properties", async () => {
//WHEN
const { body } = await mockApp
.post("/admin/report/accept")
.send({})
.set("Authorization", `Bearer ${uid}`)
.expect(422);
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: ['"reports" Required'],
});
});
it("should fail with empty reports", async () => {
//WHEN
const { body } = await mockApp
.post("/admin/report/accept")
.send({ reports: [] })
.set("Authorization", `Bearer ${uid}`)
.expect(422);
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: [
'"reports" Array must contain at least 1 element(s)',
],
});
});
it("should fail with unknown properties", async () => {
//WHEN
const { body } = await mockApp
.post("/admin/report/accept")
.send({ reports: [{ reportId: "1", extra2: "value" }], extra: "value" })
.set("Authorization", `Bearer ${uid}`)
.expect(422);
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: [
`"reports.0" Unrecognized key(s) in object: 'extra2'`,
"Unrecognized key(s) in object: 'extra'",
],
});
});
it("should fail if user is no admin", async () => {
await expectFailForNonAdmin(
mockApp
.post("/admin/report/accept")
.send({ reports: [] })
.set("Authorization", `Bearer ${uid}`),
);
});
it("should fail if admin endpoints are disabled", async () => {
//GIVEN
await expectFailForDisabledEndpoint(
mockApp
.post("/admin/report/accept")
.send({ reports: [] })
.set("Authorization", `Bearer ${uid}`),
);
});
it("should be rate limited", async () => {
//GIVEN
getReportsMock.mockResolvedValue([{ id: "1", reason: "one" } as any]);
//WHEN
await expect(
mockApp
.post("/admin/report/accept")
.send({ reports: [{ reportId: "1" }] })
.set("Authorization", `Bearer ${uid}`),
).toBeRateLimited({ max: 1, windowMs: 5000 });
});
});
describe("reject reports", () => {
const getReportsMock = vi.spyOn(ReportDal, "getReports");
const deleteReportsMock = vi.spyOn(ReportDal, "deleteReports");
const addToInboxMock = vi.spyOn(UserDal, "addToInbox");
beforeEach(() => {
[getReportsMock, deleteReportsMock, addToInboxMock].forEach((it) => {
it.mockClear();
deleteReportsMock.mockResolvedValue();
});
});
it("should reject reports", async () => {
//GIVEN
const reportOne = {
id: "1",
reason: "one",
} as any as ReportDal.DBReport;
const reportTwo = {
id: "2",
reason: "two",
} as any as ReportDal.DBReport;
getReportsMock.mockResolvedValue([reportOne, reportTwo]);
//WHEN
const { body } = await mockApp
.post("/admin/report/reject")
.send({
reports: [
{ reportId: reportOne.id, reason: "test" },
{ reportId: reportTwo.id },
],
})
.set("Authorization", `Bearer ${uid}`)
.expect(200);
expect(body).toEqual({
message: "Reports removed and users notified.",
data: null,
});
expect(addToInboxMock).toHaveBeenCalledTimes(2);
expect(deleteReportsMock).toHaveBeenCalledWith(["1", "2"]);
});
it("should fail wihtout mandatory properties", async () => {
//WHEN
const { body } = await mockApp
.post("/admin/report/reject")
.send({})
.set("Authorization", `Bearer ${uid}`)
.expect(422);
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: ['"reports" Required'],
});
});
it("should fail with empty reports", async () => {
//WHEN
const { body } = await mockApp
.post("/admin/report/reject")
.send({ reports: [] })
.set("Authorization", `Bearer ${uid}`)
.expect(422);
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: [
'"reports" Array must contain at least 1 element(s)',
],
});
});
it("should fail with unknown properties", async () => {
//WHEN
const { body } = await mockApp
.post("/admin/report/reject")
.send({ reports: [{ reportId: "1", extra2: "value" }], extra: "value" })
.set("Authorization", `Bearer ${uid}`)
.expect(422);
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: [
`"reports.0" Unrecognized key(s) in object: 'extra2'`,
"Unrecognized key(s) in object: 'extra'",
],
});
});
it("should fail if user is no admin", async () => {
await expectFailForNonAdmin(
mockApp
.post("/admin/report/reject")
.send({ reports: [] })
.set("Authorization", `Bearer ${uid}`),
);
});
it("should fail if admin endpoints are disabled", async () => {
//GIVEN
await expectFailForDisabledEndpoint(
mockApp
.post("/admin/report/reject")
.send({ reports: [] })
.set("Authorization", `Bearer ${uid}`),
);
});
it("should be rate limited", async () => {
//GIVEN
getReportsMock.mockResolvedValue([{ id: "1", reason: "one" } as any]);
//WHEN
await expect(
mockApp
.post("/admin/report/reject")
.send({ reports: [{ reportId: "1" }] })
.set("Authorization", `Bearer ${uid}`),
).toBeRateLimited({ max: 1, windowMs: 5000 });
});
});
  // POST /admin/sendForgotPasswordEmail — triggers a password reset email.
  describe("send forgot password email", () => {
    const sendForgotPasswordEmailMock = vi.spyOn(
      AuthUtil,
      "sendForgotPasswordEmail",
    );
    beforeEach(() => {
      sendForgotPasswordEmailMock.mockClear();
    });
    it("should send forgot password link", async () => {
      //GIVEN
      //WHEN
      const { body } = await mockApp
        .post("/admin/sendForgotPasswordEmail")
        .send({ email: "meowdec@example.com" })
        .set("Authorization", `Bearer ${uid}`)
        .expect(200);
      //THEN
      expect(body).toEqual({
        message: "Password reset request email sent.",
        data: null,
      });
      expect(sendForgotPasswordEmailMock).toHaveBeenCalledWith(
        "meowdec@example.com",
      );
    });
    it("should be rate limited", async () => {
      //WHEN
      await expect(
        mockApp
          .post("/admin/sendForgotPasswordEmail")
          .send({ email: "meowdec@example.com" })
          .set("Authorization", `Bearer ${uid}`),
      ).toBeRateLimited({ max: 1, windowMs: 5000 });
    });
  });
  // Expect the request to fail with 403 when the caller is not an admin.
  async function expectFailForNonAdmin(call: Test): Promise<void> {
    isAdminMock.mockResolvedValue(false);
    const { body } = await call.expect(403);
    expect(body.message).toEqual("You don't have permission to do this.");
  }
  // Expect the request to fail with 503 when admin endpoints are disabled.
  async function expectFailForDisabledEndpoint(call: Test): Promise<void> {
    await enableAdminEndpoints(false);
    const { body } = await call.expect(503);
    expect(body.message).toEqual("Admin endpoints are currently disabled.");
  }
});
// Toggle the admin.endpointsEnabled flag on the cached configuration and mock
// getCachedConfiguration to return it.
// NOTE(review): this mutates the shared cached configuration object in place,
// so later tests observe the last value set — confirm this is intended.
async function enableAdminEndpoints(enabled: boolean): Promise<void> {
  const mockConfig = await configuration;
  mockConfig.admin = { ...mockConfig.admin, endpointsEnabled: enabled };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(
    mockConfig,
  );
}

View File

@@ -0,0 +1,373 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import { Test as SuperTest } from "supertest";
import * as ApeKeyDal from "../../../src/dal/ape-keys";
import { ObjectId } from "mongodb";
import * as Configuration from "../../../src/init/configuration";
import * as UserDal from "../../../src/dal/user";
// Shared supertest app with mocked bearer authentication for a random uid.
const { mockApp, uid } = setup();
const configuration = Configuration.getCachedConfiguration();
describe("ApeKeyController", () => {
  const getUserMock = vi.spyOn(UserDal, "getPartialUser");
  // Default state: endpoints enabled, user has ape-key permission, and fake
  // timers frozen at t=1000 so created/modified timestamps are deterministic.
  beforeEach(async () => {
    await enableApeKeysEndpoints(true);
    getUserMock.mockResolvedValue(user(uid, {}));
    vi.useFakeTimers();
    vi.setSystemTime(1000);
  });
  afterEach(() => {
    getUserMock.mockClear();
    vi.useRealTimers();
  });
describe("get ape keys", () => {
const getApeKeysMock = vi.spyOn(ApeKeyDal, "getApeKeys");
afterEach(() => {
getApeKeysMock.mockClear();
});
it("should get the users config", async () => {
//GIVEN
const keyOne = apeKeyDb(uid);
const keyTwo = apeKeyDb(uid);
getApeKeysMock.mockResolvedValue([keyOne, keyTwo]);
//WHEN
const { body } = await mockApp
.get("/ape-keys")
.set("Authorization", `Bearer ${uid}`)
.expect(200);
//THEN
expect(body).toHaveProperty("message", "ApeKeys retrieved");
expect(body.data).toHaveProperty(keyOne._id.toHexString(), {
name: keyOne.name,
enabled: keyOne.enabled,
createdOn: keyOne.createdOn,
modifiedOn: keyOne.modifiedOn,
lastUsedOn: keyOne.lastUsedOn,
});
expect(body.data).toHaveProperty(keyTwo._id.toHexString(), {
name: keyTwo.name,
enabled: keyTwo.enabled,
createdOn: keyTwo.createdOn,
modifiedOn: keyTwo.modifiedOn,
lastUsedOn: keyTwo.lastUsedOn,
});
expect(body.data).keys([keyOne._id, keyTwo._id]);
expect(getApeKeysMock).toHaveBeenCalledWith(uid);
});
it("should fail if apeKeys endpoints are disabled", async () => {
await expectFailForDisabledEndpoint(
mockApp.get("/ape-keys").set("Authorization", `Bearer ${uid}`),
);
});
it("should fail if user has no apeKey permissions", async () => {
await expectFailForNoPermissions(
mockApp.get("/ape-keys").set("Authorization", `Bearer ${uid}`),
);
});
});
  // POST /ape-keys — generates a new ape key, limited per user.
  describe("add ape key", () => {
    const addApeKeyMock = vi.spyOn(ApeKeyDal, "addApeKey");
    const countApeKeysMock = vi.spyOn(ApeKeyDal, "countApeKeysForUser");
    beforeEach(() => {
      countApeKeysMock.mockResolvedValue(0);
    });
    afterEach(() => {
      addApeKeyMock.mockClear();
      countApeKeysMock.mockClear();
    });
    it("should add ape key", async () => {
      //GIVEN
      addApeKeyMock.mockResolvedValue("1");
      //WHEN
      const { body } = await mockApp
        .post("/ape-keys")
        .set("Authorization", `Bearer ${uid}`)
        .send({ name: "test", enabled: true })
        .expect(200);
      expect(body.message).toEqual("ApeKey generated");
      expect(body.data).keys("apeKey", "apeKeyDetails", "apeKeyId");
      expect(body.data.apeKey).not.toBeNull();
      // Timestamps are 1000 because the system clock is frozen in beforeEach.
      expect(body.data.apeKeyDetails).toStrictEqual({
        createdOn: 1000,
        enabled: true,
        lastUsedOn: -1,
        modifiedOn: 1000,
        name: "test",
      });
      expect(body.data.apeKeyId).toEqual("1");
      expect(addApeKeyMock).toHaveBeenCalledWith(
        expect.objectContaining({
          createdOn: 1000,
          enabled: true,
          lastUsedOn: -1,
          modifiedOn: 1000,
          name: "test",
          uid: uid,
          useCount: 0,
        }),
      );
    });
    it("should fail without mandatory properties", async () => {
      //WHEN
      const { body } = await mockApp
        .post("/ape-keys")
        .send({})
        .set("Authorization", `Bearer ${uid}`)
        .expect(422);
      //THEN
      expect(body).toStrictEqual({
        message: "Invalid request data schema",
        validationErrors: [`"name" Required`, `"enabled" Required`],
      });
    });
    it("should fail with extra properties", async () => {
      //WHEN
      const { body } = await mockApp
        .post("/ape-keys")
        .send({ name: "test", enabled: true, extra: "value" })
        .set("Authorization", `Bearer ${uid}`)
        .expect(422);
      //THEN
      expect(body).toStrictEqual({
        message: "Invalid request data schema",
        validationErrors: ["Unrecognized key(s) in object: 'extra'"],
      });
    });
    it("should fail if max apeKeys is reached", async () => {
      //GIVEN — the count already equals the configured maximum
      countApeKeysMock.mockResolvedValue(1);
      //WHEN
      const { body } = await mockApp
        .post("/ape-keys")
        .send({ name: "test", enabled: false })
        .set("Authorization", `Bearer ${uid}`)
        .expect(409);
      //THEN
      expect(body.message).toEqual(
        "Maximum number of ApeKeys have been generated",
      );
    });
    it("should fail if apeKeys endpoints are disabled", async () => {
      await expectFailForDisabledEndpoint(
        mockApp
          .post("/ape-keys")
          .send({ name: "test", enabled: false })
          .set("Authorization", `Bearer ${uid}`),
      );
    });
    it("should fail if user has no apeKey permissions", async () => {
      await expectFailForNoPermissions(
        mockApp
          .post("/ape-keys")
          .send({ name: "test", enabled: false })
          .set("Authorization", `Bearer ${uid}`),
      );
    });
  });
// Tests for PATCH /ape-keys/:id: full and partial edits, routing,
// payload validation, feature flag, and permission checks.
describe("edit ape key", () => {
  const editApeKeyMock = vi.spyOn(ApeKeyDal, "editApeKey");
  const apeKeyId = new ObjectId().toHexString();
  afterEach(() => {
    editApeKeyMock.mockClear();
  });
  it("should edit ape key", async () => {
    //GIVEN
    editApeKeyMock.mockResolvedValue();
    //WHEN
    const { body } = await mockApp
      .patch(`/ape-keys/${apeKeyId}`)
      .send({ name: "new", enabled: false })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(body.message).toEqual("ApeKey updated");
    expect(editApeKeyMock).toHaveBeenCalledWith(uid, apeKeyId, "new", false);
  });
  it("should edit ape key with single property", async () => {
    //GIVEN
    editApeKeyMock.mockResolvedValue();
    //WHEN
    const { body } = await mockApp
      .patch(`/ape-keys/${apeKeyId}`)
      .send({ name: "new" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(body.message).toEqual("ApeKey updated");
    // Omitted properties are forwarded as undefined to the DAL.
    expect(editApeKeyMock).toHaveBeenCalledWith(
      uid,
      apeKeyId,
      "new",
      undefined,
    );
  });
  it("should fail with missing path", async () => {
    //GIVEN
    //WHEN
    // No key id in the path -> no matching route.
    await mockApp
      .patch(`/ape-keys/`)
      .set("Authorization", `Bearer ${uid}`)
      .expect(404);
  });
  it("should fail with extra properties", async () => {
    //GIVEN
    //WHEN
    const { body } = await mockApp
      .patch(`/ape-keys/${apeKeyId}`)
      .send({ name: "new", extra: "value" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toStrictEqual({
      message: "Invalid request data schema",
      validationErrors: ["Unrecognized key(s) in object: 'extra'"],
    });
  });
  it("should fail if apeKeys endpoints are disabled", async () => {
    await expectFailForDisabledEndpoint(
      mockApp
        .patch(`/ape-keys/${apeKeyId}`)
        .send({ name: "test", enabled: false })
        .set("Authorization", `Bearer ${uid}`),
    );
  });
  it("should fail if user has no apeKey permissions", async () => {
    await expectFailForNoPermissions(
      mockApp
        .patch(`/ape-keys/${apeKeyId}`)
        .send({ name: "test", enabled: false })
        .set("Authorization", `Bearer ${uid}`),
    );
  });
});
// Tests for DELETE /ape-keys/:id: deletion, routing, feature flag,
// and permission checks.
describe("delete ape key", () => {
  const deleteApeKeyMock = vi.spyOn(ApeKeyDal, "deleteApeKey");
  const apeKeyId = new ObjectId().toHexString();
  afterEach(() => {
    deleteApeKeyMock.mockClear();
  });
  it("should delete ape key", async () => {
    //GIVEN
    deleteApeKeyMock.mockResolvedValue();
    //WHEN
    const { body } = await mockApp
      .delete(`/ape-keys/${apeKeyId}`)
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(body.message).toEqual("ApeKey deleted");
    expect(deleteApeKeyMock).toHaveBeenCalledWith(uid, apeKeyId);
  });
  it("should fail with missing path", async () => {
    //GIVEN
    //WHEN
    // No key id in the path -> no matching route.
    await mockApp
      .delete(`/ape-keys/`)
      .set("Authorization", `Bearer ${uid}`)
      .expect(404);
  });
  it("should fail if apeKeys endpoints are disabled", async () => {
    await expectFailForDisabledEndpoint(
      mockApp
        .delete(`/ape-keys/${apeKeyId}`)
        .set("Authorization", `Bearer ${uid}`),
    );
  });
  it("should fail if user has no apeKey permissions", async () => {
    await expectFailForNoPermissions(
      mockApp
        .delete(`/ape-keys/${apeKeyId}`)
        .set("Authorization", `Bearer ${uid}`),
    );
  });
});
// Expects the given in-flight request to be rejected with 403 once the
// user's `canManageApeKeys` flag is false.
async function expectFailForNoPermissions(call: SuperTest): Promise<void> {
  getUserMock.mockResolvedValue(user(uid, { canManageApeKeys: false }));
  const { body } = await call.expect(403);
  expect(body.message).toEqual(
    "You have lost access to ape keys, please contact support",
  );
}
// Expects the given in-flight request to be rejected with 503 once the
// ape-key endpoints are disabled via configuration.
async function expectFailForDisabledEndpoint(call: SuperTest): Promise<void> {
  await enableApeKeysEndpoints(false);
  const { body } = await call.expect(503);
  expect(body.message).toEqual("ApeKeys are currently disabled.");
}
});
/**
 * Builds a DBApeKey fixture for the given uid.
 *
 * Timestamp fields are randomized (0 .. Date.now()) so each fixture is
 * distinct; pass `data` to override any field deterministically.
 * NOTE(review): the random timestamps make fixtures nondeterministic
 * across runs — fine here since assertions compare against the same
 * object, but override them when exact values matter.
 */
function apeKeyDb(
  uid: string,
  data?: Partial<ApeKeyDal.DBApeKey>,
): ApeKeyDal.DBApeKey {
  return {
    _id: new ObjectId(),
    uid,
    hash: "hash",
    useCount: 1,
    name: "name",
    enabled: true,
    createdOn: Math.random() * Date.now(),
    lastUsedOn: Math.random() * Date.now(),
    modifiedOn: Math.random() * Date.now(),
    ...data,
  };
}
/**
 * Mocks `getCachedConfiguration` so that the ape-key endpoints are
 * enabled/disabled and at most one key is allowed per user.
 *
 * Builds a shallow copy of the cached configuration instead of mutating
 * the shared object in place (the previous behavior), so the tweaked
 * `apeKeys` section cannot leak into other suites that read the real
 * cached configuration.
 */
async function enableApeKeysEndpoints(enabled: boolean): Promise<void> {
  const baseConfig = await configuration;
  const mockConfig = {
    ...baseConfig,
    apeKeys: {
      ...baseConfig.apeKeys,
      endpointsEnabled: enabled,
      maxKeysPerUser: 1,
    },
  };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(
    mockConfig,
  );
}
/** Builds a DBUser fixture: the given uid merged with the overrides. */
function user(uid: string, data: Partial<UserDal.DBUser>): UserDal.DBUser {
  const fixture = Object.assign({ uid }, data);
  return fixture as UserDal.DBUser;
}

View File

@@ -0,0 +1,133 @@
import { describe, it, expect, afterEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import * as ConfigDal from "../../../src/dal/config";
import { ObjectId } from "mongodb";
const { mockApp, uid } = setup();
// Tests for the /configs endpoints: get, update (with schema
// validation), and delete of the authenticated user's config.
describe("ConfigController", () => {
  describe("get config", () => {
    const getConfigMock = vi.spyOn(ConfigDal, "getConfig");
    afterEach(() => {
      getConfigMock.mockClear();
    });
    it("should get the users config", async () => {
      //GIVEN
      getConfigMock.mockResolvedValue({
        _id: new ObjectId(),
        uid: uid,
        config: { language: "english" },
      });
      //WHEN
      const { body } = await mockApp
        .get("/configs")
        .set("Authorization", `Bearer ${uid}`)
        .expect(200);
      //THEN
      // Only the inner `config` document is returned, not the DB wrapper.
      expect(body).toStrictEqual({
        message: "Configuration retrieved",
        data: { language: "english" },
      });
      expect(getConfigMock).toHaveBeenCalledWith(uid);
    });
  });
  describe("update config", () => {
    const saveConfigMock = vi.spyOn(ConfigDal, "saveConfig");
    afterEach(() => {
      saveConfigMock.mockClear();
    });
    it("should update the users config", async () => {
      //GIVEN
      saveConfigMock.mockResolvedValue({} as any);
      //WHEN
      const { body } = await mockApp
        .patch("/configs")
        .set("Authorization", `Bearer ${uid}`)
        .accept("application/json")
        .send({ language: "english" })
        .expect(200);
      //THEN
      expect(body).toStrictEqual({
        message: "Config updated",
        data: null,
      });
      expect(saveConfigMock).toHaveBeenCalledWith(uid, {
        language: "english",
      });
    });
    it("should fail with unknown config", async () => {
      //WHEN
      const { body } = await mockApp
        .patch("/configs")
        .set("Authorization", `Bearer ${uid}`)
        .accept("application/json")
        .send({ unknownValue: "unknown" })
        .expect(422);
      //THEN
      expect(body).toStrictEqual({
        message: "Invalid request data schema",
        validationErrors: [`Unrecognized key(s) in object: 'unknownValue'`],
      });
      expect(saveConfigMock).not.toHaveBeenCalled();
    });
    it("should fail with invalid configs", async () => {
      //WHEN
      const { body } = await mockApp
        .patch("/configs")
        .set("Authorization", `Bearer ${uid}`)
        .accept("application/json")
        .send({ autoSwitchTheme: "yes", confidenceMode: "pretty" })
        .expect(422);
      //THEN
      expect(body).toStrictEqual({
        message: "Invalid request data schema",
        validationErrors: [
          `"confidenceMode" Invalid enum value. Expected 'off' | 'on' | 'max', received 'pretty'`,
          `"autoSwitchTheme" Expected boolean, received string`,
        ],
      });
      expect(saveConfigMock).not.toHaveBeenCalled();
    });
  });
  describe("delete config", () => {
    const deleteConfigMock = vi.spyOn(ConfigDal, "deleteConfig");
    afterEach(() => {
      deleteConfigMock.mockClear();
    });
    it("should delete the users config", async () => {
      //GIVEN
      deleteConfigMock.mockResolvedValue();
      //WHEN
      const { body } = await mockApp
        .delete("/configs")
        .set("Authorization", `Bearer ${uid}`)
        .expect(200);
      //THEN
      expect(body).toStrictEqual({
        message: "Config deleted",
        data: null,
      });
      expect(deleteConfigMock).toHaveBeenCalledWith(uid);
    });
  });
});

View File

@@ -0,0 +1,183 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import {
BASE_CONFIGURATION,
CONFIGURATION_FORM_SCHEMA,
} from "../../../src/constants/base-configuration";
import * as Configuration from "../../../src/init/configuration";
import type { Configuration as ConfigurationType } from "@monkeytype/schemas/configuration";
import * as Misc from "../../../src/utils/misc";
import * as AdminUuids from "../../../src/dal/admin-uids";
const { mockApp, uid, mockAuth } = setup();
// Tests for the /configuration endpoints: public read, plus
// schema read and patch which are admin-only outside dev mode.
describe("Configuration Controller", () => {
  const isDevEnvironmentMock = vi.spyOn(Misc, "isDevEnvironment");
  const isAdminMock = vi.spyOn(AdminUuids, "isAdmin");
  beforeEach(() => {
    isAdminMock.mockClear();
    isDevEnvironmentMock.mockClear();
    // Default: dev environment and admin user; individual tests
    // override these to exercise the prod/non-admin paths.
    isDevEnvironmentMock.mockReturnValue(true);
    isAdminMock.mockResolvedValue(true);
  });
  describe("getConfiguration", () => {
    it("should get without authentication", async () => {
      //GIVEN
      //WHEN
      const { body } = await mockApp.get("/configuration").expect(200);
      //THEN
      expect(body).toEqual({
        message: "Configuration retrieved",
        data: BASE_CONFIGURATION,
      });
    });
  });
  describe("getConfigurationSchema", () => {
    it("should get without authentication on dev", async () => {
      //GIVEN
      mockAuth.noAuth();
      //WHEN
      const { body } = await mockApp.get("/configuration/schema").expect(200);
      //THEN
      expect(body).toEqual({
        message: "Configuration schema retrieved",
        data: CONFIGURATION_FORM_SCHEMA,
      });
    });
    it("should fail without authentication on prod", async () => {
      //GIVEN
      isDevEnvironmentMock.mockReturnValue(false);
      //WHEN
      await mockApp.get("/configuration/schema").expect(401);
    });
    it("should get with authentication on prod", async () => {
      //GIVEN
      isDevEnvironmentMock.mockReturnValue(false);
      //WHEN
      const { body } = await mockApp
        .get("/configuration/schema")
        .set("Authorization", "Bearer 123456789")
        .expect(200);
      //THEN
      expect(body).toEqual({
        message: "Configuration schema retrieved",
        data: CONFIGURATION_FORM_SCHEMA,
      });
      mockAuth.expectToHaveBeenCalled();
    });
    it("should fail with non-admin user on prod", async () => {
      //GIVEN
      isDevEnvironmentMock.mockReturnValue(false);
      isAdminMock.mockResolvedValue(false);
      //WHEN
      const { body } = await mockApp
        .get("/configuration/schema")
        .set("Authorization", "Bearer 123456789")
        .expect(403);
      //THEN
      expect(body.message).toEqual("You don't have permission to do this.");
      mockAuth.expectToHaveBeenCalled();
      expect(isAdminMock).toHaveBeenCalledWith(uid);
    });
  });
  describe("updateConfiguration", () => {
    const patchConfigurationMock = vi.spyOn(
      Configuration,
      "patchConfiguration",
    );
    beforeEach(() => {
      patchConfigurationMock.mockClear();
      patchConfigurationMock.mockResolvedValue(true);
    });
    it("should update without authentication on dev", async () => {
      //GIVEN
      mockAuth.noAuth();
      const patch = {
        users: {
          premium: {
            enabled: true,
          },
        },
      } as Partial<ConfigurationType>;
      //WHEN
      const { body } = await mockApp
        .patch("/configuration")
        .send({ configuration: patch })
        .expect(200);
      //THEN
      expect(body).toEqual({
        message: "Configuration updated",
        data: null,
      });
      expect(patchConfigurationMock).toHaveBeenCalledWith(patch);
    });
    it("should fail update without authentication on prod", async () => {
      //GIVEN
      mockAuth.noAuth();
      isDevEnvironmentMock.mockReturnValue(false);
      //WHEN
      await mockApp
        .patch("/configuration")
        .send({ configuration: {} })
        .expect(401);
      //THEN
      expect(patchConfigurationMock).not.toHaveBeenCalled();
    });
    it("should update with authentication on prod", async () => {
      //GIVEN
      isDevEnvironmentMock.mockReturnValue(false);
      //WHEN
      await mockApp
        .patch("/configuration")
        .set("Authorization", "Bearer 123456789")
        .send({ configuration: {} })
        .expect(200);
      //THEN
      expect(patchConfigurationMock).toHaveBeenCalled();
      mockAuth.expectToHaveBeenCalled();
    });
    it("should fail for non admin users on prod", async () => {
      //GIVEN
      isDevEnvironmentMock.mockReturnValue(false);
      isAdminMock.mockResolvedValue(false);
      //WHEN
      await mockApp
        .patch("/configuration")
        .set("Authorization", "Bearer 123456789")
        .send({ configuration: {} })
        .expect(403);
      //THEN
      expect(patchConfigurationMock).not.toHaveBeenCalled();
      expect(isAdminMock).toHaveBeenCalledWith(uid);
    });
  });
});

View File

@@ -0,0 +1,397 @@
import { describe, expect, it, vi, beforeEach } from "vitest";
import request, { Test as SuperTest } from "supertest";
import app from "../../../src/app";
import { mockBearerAuthentication } from "../../__testData__/auth";
import * as Configuration from "../../../src/init/configuration";
import { ObjectId } from "mongodb";
import * as ConnectionsDal from "../../../src/dal/connections";
import * as UserDal from "../../../src/dal/user";
const mockApp = request(app);
const configuration = Configuration.getCachedConfiguration();
const uid = new ObjectId().toHexString();
const mockAuth = mockBearerAuthentication(uid);
describe("ConnectionsController", () => {
beforeEach(async () => {
await enableConnectionsEndpoints(true);
vi.useFakeTimers();
vi.setSystemTime(1000);
mockAuth.beforeEach();
});
// Tests for GET /connections: retrieval for the current user plus the
// status/type query-parameter filters that shape the DAL query.
describe("get connections", () => {
  const getConnectionsMock = vi.spyOn(ConnectionsDal, "getConnections");
  beforeEach(() => {
    getConnectionsMock.mockClear();
  });
  it("should get for the current user", async () => {
    //GIVEN
    const friend: ConnectionsDal.DBConnection = {
      _id: new ObjectId(),
      lastModified: 42,
      initiatorUid: new ObjectId().toHexString(),
      initiatorName: "Bob",
      receiverUid: new ObjectId().toHexString(),
      receiverName: "Kevin",
      status: "pending",
      key: "key",
    };
    getConnectionsMock.mockResolvedValue([friend]);
    //WHEN
    const { body } = await mockApp
      .get("/connections")
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    // The secret `key` must be stripped from the response.
    expect(body.data).toEqual([
      { ...friend, _id: friend._id.toHexString(), key: undefined },
    ]);
    expect(getConnectionsMock).toHaveBeenCalledWith({
      initiatorUid: uid,
      receiverUid: uid,
    });
  });
  it("should filter by status", async () => {
    //GIVEN
    getConnectionsMock.mockResolvedValue([]);
    //WHEN
    await mockApp
      .get("/connections")
      .query({ status: "accepted" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(getConnectionsMock).toHaveBeenCalledWith({
      initiatorUid: uid,
      receiverUid: uid,
      status: ["accepted"],
    });
  });
  it("should filter by multiple status", async () => {
    //GIVEN
    getConnectionsMock.mockResolvedValue([]);
    //WHEN
    await mockApp
      .get("/connections")
      .query({ status: ["accepted", "blocked"] })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(getConnectionsMock).toHaveBeenCalledWith({
      initiatorUid: uid,
      receiverUid: uid,
      status: ["accepted", "blocked"],
    });
  });
  it("should filter by type incoming", async () => {
    //GIVEN
    getConnectionsMock.mockResolvedValue([]);
    //WHEN
    await mockApp
      .get("/connections")
      .query({ type: "incoming" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    // incoming -> match only connections received by the user
    expect(getConnectionsMock).toHaveBeenCalledWith({
      receiverUid: uid,
    });
  });
  it("should filter by type outgoing", async () => {
    //GIVEN
    getConnectionsMock.mockResolvedValue([]);
    //WHEN
    await mockApp
      .get("/connections")
      .query({ type: "outgoing" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    // outgoing -> match only connections initiated by the user
    expect(getConnectionsMock).toHaveBeenCalledWith({
      initiatorUid: uid,
    });
  });
  it("should filter by multiple types", async () => {
    //GIVEN
    getConnectionsMock.mockResolvedValue([]);
    //WHEN
    await mockApp
      .get("/connections")
      .query({ type: ["incoming", "outgoing"] })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(getConnectionsMock).toHaveBeenCalledWith({
      initiatorUid: uid,
      receiverUid: uid,
    });
  });
  it("should fail if connections endpoints are disabled", async () => {
    await expectFailForDisabledEndpoint(
      mockApp.get("/connections").set("Authorization", `Bearer ${uid}`),
    );
  });
  it("should fail without authentication", async () => {
    await mockApp.get("/connections").expect(401);
  });
  it("should fail for unknown query parameter", async () => {
    const { body } = await mockApp
      .get("/connections")
      .query({ extra: "yes" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    expect(body).toStrictEqual({
      message: "Invalid query schema",
      validationErrors: ["Unrecognized key(s) in object: 'extra'"],
    });
  });
});
// Tests for POST /connections: creating a connection request,
// self-friending rejection, payload validation, feature flag, and auth.
describe("create connection", () => {
  const getUserByNameMock = vi.spyOn(UserDal, "getUserByName");
  const getPartialUserMock = vi.spyOn(UserDal, "getPartialUser");
  // Spies on ConnectionsDal.create — it creates a connection, not a
  // user, so the mock is named accordingly (was `createUserMock`).
  const createConnectionMock = vi.spyOn(ConnectionsDal, "create");
  beforeEach(() => {
    // Callback parameter renamed from `it` to `mock` — the old name
    // shadowed vitest's imported `it` inside the callback.
    [getUserByNameMock, getPartialUserMock, createConnectionMock].forEach(
      (mock) => mock.mockClear(),
    );
  });
  it("should create", async () => {
    //GIVEN
    const me = { uid, name: "Bob" };
    const myFriend = { uid: new ObjectId().toHexString(), name: "Kevin" };
    getUserByNameMock.mockResolvedValue(myFriend as any);
    getPartialUserMock.mockResolvedValue(me as any);
    const result: ConnectionsDal.DBConnection = {
      _id: new ObjectId(),
      lastModified: 42,
      initiatorUid: me.uid,
      initiatorName: me.name,
      receiverUid: myFriend.uid,
      receiverName: myFriend.name,
      key: "test",
      status: "pending",
    };
    createConnectionMock.mockResolvedValue(result);
    //WHEN
    const { body } = await mockApp
      .post("/connections")
      .send({ receiverName: "Kevin" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    // The secret `key` must not be echoed back to the client.
    expect(body.data).toEqual({
      _id: result._id.toHexString(),
      lastModified: 42,
      initiatorUid: me.uid,
      initiatorName: me.name,
      receiverUid: myFriend.uid,
      receiverName: myFriend.name,
      status: "pending",
    });
    expect(getUserByNameMock).toHaveBeenCalledWith(
      "Kevin",
      "create connection",
    );
    expect(getPartialUserMock).toHaveBeenCalledWith(
      uid,
      "create connection",
      ["uid", "name"],
    );
    // 100 is presumably the configured max connections per user — TODO
    // confirm against the connections configuration defaults.
    expect(createConnectionMock).toHaveBeenCalledWith(me, myFriend, 100);
  });
  it("should fail if user and receiver are the same", async () => {
    //GIVEN
    const me = { uid, name: "Bob" };
    getUserByNameMock.mockResolvedValue(me as any);
    getPartialUserMock.mockResolvedValue(me as any);
    //WHEN
    const { body } = await mockApp
      .post("/connections")
      .send({ receiverName: "Bob" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(400);
    //THEN
    expect(body.message).toEqual("You cannot be your own friend, sorry.");
  });
  it("should fail without mandatory properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/connections")
      .send({})
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toStrictEqual({
      message: "Invalid request data schema",
      validationErrors: [`"receiverName" Required`],
    });
  });
  it("should fail with extra properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/connections")
      .send({ receiverName: "1", extra: "value" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toStrictEqual({
      message: "Invalid request data schema",
      validationErrors: ["Unrecognized key(s) in object: 'extra'"],
    });
  });
  it("should fail if connections endpoints are disabled", async () => {
    await expectFailForDisabledEndpoint(
      mockApp
        .post("/connections")
        .send({ receiverName: "1" })
        .set("Authorization", `Bearer ${uid}`),
    );
  });
  it("should fail without authentication", async () => {
    await mockApp.post("/connections").expect(401);
  });
});
// Tests for DELETE /connections/:id: deletion, feature flag, and auth.
describe("delete connection", () => {
  const deleteByIdMock = vi.spyOn(ConnectionsDal, "deleteById");
  beforeEach(() => {
    deleteByIdMock.mockClear().mockResolvedValue();
  });
  it("should delete by id", async () => {
    //WHEN
    await mockApp
      .delete("/connections/1")
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(deleteByIdMock).toHaveBeenCalledWith(uid, "1");
  });
  it("should fail if connections endpoints are disabled", async () => {
    await expectFailForDisabledEndpoint(
      mockApp.delete("/connections/1").set("Authorization", `Bearer ${uid}`),
    );
  });
  it("should fail without authentication", async () => {
    await mockApp.delete("/connections/1").expect(401);
  });
});
// Tests for PATCH /connections/:id: accepting/blocking a connection,
// status validation, feature flag, and auth.
describe("update connection", () => {
  const updateStatusMock = vi.spyOn(ConnectionsDal, "updateStatus");
  beforeEach(() => {
    updateStatusMock.mockClear().mockResolvedValue();
  });
  it("should accept", async () => {
    //WHEN
    await mockApp
      .patch("/connections/1")
      .send({ status: "accepted" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(updateStatusMock).toHaveBeenCalledWith(uid, "1", "accepted");
  });
  it("should block", async () => {
    //WHEN
    await mockApp
      .patch("/connections/1")
      .send({ status: "blocked" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(updateStatusMock).toHaveBeenCalledWith(uid, "1", "blocked");
  });
  it("should fail for invalid status", async () => {
    const { body } = await mockApp
      .patch("/connections/1")
      .send({ status: "invalid" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: [
        `"status" Invalid enum value. Expected 'accepted' | 'blocked', received 'invalid'`,
      ],
    });
  });
  it("should fail if connections endpoints are disabled", async () => {
    await expectFailForDisabledEndpoint(
      mockApp
        .patch("/connections/1")
        .send({ status: "accepted" })
        .set("Authorization", `Bearer ${uid}`),
    );
  });
  it("should fail without authentication", async () => {
    await mockApp
      .patch("/connections/1")
      .send({ status: "accepted" })
      .expect(401);
  });
});
});
/**
 * Mocks `getCachedConfiguration` with the connections feature
 * enabled/disabled.
 *
 * Builds a shallow copy of the cached configuration instead of mutating
 * the shared object in place (the previous behavior), so the tweaked
 * `connections` section cannot leak into other suites that read the
 * real cached configuration.
 */
async function enableConnectionsEndpoints(enabled: boolean): Promise<void> {
  const baseConfig = await configuration;
  const mockConfig = {
    ...baseConfig,
    connections: { ...baseConfig.connections, enabled },
  };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(
    mockConfig,
  );
}
// Expects the given in-flight request to be rejected with 503 once the
// connections feature is disabled via configuration.
async function expectFailForDisabledEndpoint(call: SuperTest): Promise<void> {
  await enableConnectionsEndpoints(false);
  const { body } = await call.expect(503);
  expect(body.message).toEqual("Connections are not available at this time.");
}

View File

@@ -0,0 +1,56 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import * as Misc from "../../../src/utils/misc";
const { mockApp } = setup();
// Tests for POST /dev/generateData: dev-only guard and payload
// validation for the test-data generator endpoint.
describe("DevController", () => {
  describe("generate testData", () => {
    const isDevEnvironmentMock = vi.spyOn(Misc, "isDevEnvironment");
    beforeEach(() => {
      isDevEnvironmentMock.mockClear();
      // Default: dev environment; individual tests flip this to
      // exercise the prod rejection path.
      isDevEnvironmentMock.mockReturnValue(true);
    });
    it("should fail on prod", async () => {
      //GIVEN
      isDevEnvironmentMock.mockReturnValue(false);
      //WHEN
      const { body } = await mockApp
        .post("/dev/generateData")
        .set("Authorization", "Bearer 123456789")
        .send({ username: "test" })
        .expect(503);
      //THEN
      expect(body.message).toEqual(
        "Development endpoints are only available in DEV mode.",
      );
    });
    it("should fail without mandatory properties", async () => {
      //WHEN
      const { body } = await mockApp
        .post("/dev/generateData")
        .send({})
        .expect(422);
      //THEN
      expect(body).toEqual({
        message: "Invalid request data schema",
        validationErrors: [`"username" Required`],
      });
    });
    it("should fail with unknown properties", async () => {
      //WHEN
      const { body } = await mockApp
        .post("/dev/generateData")
        .send({ username: "Bob", extra: "value" })
        .expect(422);
      //THEN
      expect(body).toEqual({
        message: "Invalid request data schema",
        validationErrors: ["Unrecognized key(s) in object: 'extra'"],
      });
    });
  });
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,501 @@
import { describe, it, expect, afterEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import * as PresetDal from "../../../src/dal/preset";
import { ObjectId } from "mongodb";
const { mockApp, uid } = setup();
describe("PresetController", () => {
// Tests for GET /presets: full and partial (settingGroups) presets are
// returned without uid, and an empty list round-trips cleanly.
describe("get presets", () => {
  const getPresetsMock = vi.spyOn(PresetDal, "getPresets");
  afterEach(() => {
    getPresetsMock.mockClear();
  });
  it("should get the users presets", async () => {
    //GIVEN
    const presetOne = {
      _id: new ObjectId(),
      uid: uid,
      name: "test1",
      config: { language: "english" },
    };
    const presetTwo = {
      _id: new ObjectId(),
      uid: uid,
      name: "test2",
      settingGroups: ["hideElements"],
      config: {
        showKeyTips: true,
        capsLockWarning: true,
        showOutOfFocusWarning: true,
        showAverage: "off",
      },
    };
    //@ts-expect-error
    getPresetsMock.mockResolvedValue([presetOne, presetTwo]);
    //WHEN
    const { body } = await mockApp
      .get("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    // `uid` is stripped from each preset in the response.
    expect(body).toStrictEqual({
      message: "Presets retrieved",
      data: [
        {
          _id: presetOne._id.toHexString(),
          name: "test1",
          config: { language: "english" },
        },
        {
          _id: presetTwo._id.toHexString(),
          name: "test2",
          settingGroups: ["hideElements"],
          config: {
            showKeyTips: true,
            capsLockWarning: true,
            showOutOfFocusWarning: true,
            showAverage: "off",
          },
        },
      ],
    });
    expect(getPresetsMock).toHaveBeenCalledWith(uid);
  });
  it("should return empty array if user has no presets", async () => {
    //GIVEN
    getPresetsMock.mockResolvedValue([]);
    //WHEN
    const { body } = await mockApp
      .get("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(body).toStrictEqual({
      message: "Presets retrieved",
      data: [],
    });
    expect(getPresetsMock).toHaveBeenCalledWith(uid);
  });
});
// Tests for POST /presets: full and partial preset creation, plus
// schema validation (missing/extra/invalid fields, settingGroups
// constraints). Fixes the "emtpy" typo in a test title.
describe("add preset", () => {
  const addPresetMock = vi.spyOn(PresetDal, "addPreset");
  afterEach(() => {
    addPresetMock.mockClear();
  });
  it("should add the users full preset", async () => {
    //GIVEN
    addPresetMock.mockResolvedValue({ presetId: "1" });
    //WHEN
    const { body } = await mockApp
      .post("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .accept("application/json")
      .send({
        name: "new",
        config: {
          language: "english",
          tags: ["one", "two"],
        },
      })
      .expect(200);
    //THEN
    expect(body).toStrictEqual({
      message: "Preset created",
      data: { presetId: "1" },
    });
    expect(addPresetMock).toHaveBeenCalledWith(uid, {
      name: "new",
      config: { language: "english", tags: ["one", "two"] },
    });
  });
  it("should add the users partial preset", async () => {
    //GIVEN
    addPresetMock.mockResolvedValue({ presetId: "1" });
    //WHEN
    const { body } = await mockApp
      .post("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .accept("application/json")
      .send({
        name: "new",
        settingGroups: ["hideElements"],
        config: {
          showKeyTips: true,
          capsLockWarning: true,
          showOutOfFocusWarning: true,
          showAverage: "off",
        },
      })
      .expect(200);
    //THEN
    expect(body).toStrictEqual({
      message: "Preset created",
      data: { presetId: "1" },
    });
    expect(addPresetMock).toHaveBeenCalledWith(uid, {
      name: "new",
      settingGroups: ["hideElements"],
      config: {
        showKeyTips: true,
        capsLockWarning: true,
        showOutOfFocusWarning: true,
        showAverage: "off",
      },
    });
  });
  it("should fail for no setting groups in partial presets", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .accept("application/json")
      .send({
        name: "update",
        settingGroups: [],
        config: {},
      })
      .expect(422);
    expect(body).toStrictEqual({
      message: "Invalid request data schema",
      validationErrors: [
        `"settingGroups" Array must contain at least 1 element(s)`,
      ],
    });
    expect(addPresetMock).not.toHaveBeenCalled();
  });
  // Title typo fixed: "emtpy" -> "empty".
  it("should not fail with empty config", async () => {
    //GIVEN
    addPresetMock.mockResolvedValue({ presetId: "1" });
    //WHEN
    const { body } = await mockApp
      .post("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .accept("application/json")
      .send({ name: "new", config: {} })
      .expect(200);
    //THEN
    expect(body).toStrictEqual({
      message: "Preset created",
      data: { presetId: "1" },
    });
    expect(addPresetMock).toHaveBeenCalledWith(uid, {
      name: "new",
      config: {},
    });
  });
  it("should fail with missing mandatory properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .accept("application/json")
      .send({})
      .expect(422);
    expect(body).toStrictEqual({
      message: "Invalid request data schema",
      validationErrors: [`"name" Required`, `"config" Required`],
    });
    expect(addPresetMock).not.toHaveBeenCalled();
  });
  it("should fail with invalid preset", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .accept("application/json")
      .send({
        _id: "1",
        name: "update",
        extra: "extra",
        config: {
          extra: "extra",
          autoSwitchTheme: "yes",
          confidenceMode: "pretty",
        },
      })
      .expect(422);
    //THEN
    expect(body).toStrictEqual({
      message: "Invalid request data schema",
      validationErrors: [
        `"config.confidenceMode" Invalid enum value. Expected 'off' | 'on' | 'max', received 'pretty'`,
        `"config.autoSwitchTheme" Expected boolean, received string`,
        `"config" Unrecognized key(s) in object: 'extra'`,
        `Unrecognized key(s) in object: '_id', 'extra'`,
      ],
    });
    expect(addPresetMock).not.toHaveBeenCalled();
  });
  it("should fail with duplicate group settings in partial preset", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/presets")
      .set("Authorization", `Bearer ${uid}`)
      .accept("application/json")
      .send({
        name: "new",
        settingGroups: ["hideElements", "hideElements"],
        config: {
          showKeyTips: true,
          capsLockWarning: true,
          showOutOfFocusWarning: true,
          showAverage: "off",
        },
      })
      .expect(422);
    //THEN
    expect(body).toStrictEqual({
      message: "Invalid request data schema",
      validationErrors: [`"settingGroups" No duplicates allowed.`],
    });
    expect(addPresetMock).not.toHaveBeenCalled();
  });
});
describe("update preset", () => {
const editPresetMock = vi.spyOn(PresetDal, "editPreset");
afterEach(() => {
editPresetMock.mockClear();
});
it("should update the users preset", async () => {
//GIVEN
editPresetMock.mockResolvedValue({} as any);
//WHEN
const { body } = await mockApp
.patch("/presets")
.set("Authorization", `Bearer ${uid}`)
.accept("application/json")
.send({
_id: "1",
name: "new",
config: {
language: "english",
tags: ["one", "two"],
},
})
.expect(200);
//THEN
expect(body).toStrictEqual({
message: "Preset updated",
data: null,
});
expect(editPresetMock).toHaveBeenCalledWith(uid, {
_id: "1",
name: "new",
config: { language: "english", tags: ["one", "two"] },
});
});
it("should update the users partial preset", async () => {
//GIVEN
editPresetMock.mockResolvedValue({} as any);
//WHEN
const { body } = await mockApp
.patch("/presets")
.set("Authorization", `Bearer ${uid}`)
.accept("application/json")
.send({
_id: "1",
name: "new",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
})
.expect(200);
//THEN
expect(body).toStrictEqual({
message: "Preset updated",
data: null,
});
expect(editPresetMock).toHaveBeenCalledWith(uid, {
_id: "1",
name: "new",
settingGroups: ["hideElements"],
config: {
showKeyTips: true,
capsLockWarning: true,
showOutOfFocusWarning: true,
showAverage: "off",
},
});
});
it("should not fail with emtpy config", async () => {
//GIVEN
editPresetMock.mockResolvedValue({} as any);
//WHEN
const { body } = await mockApp
.patch("/presets")
.set("Authorization", `Bearer ${uid}`)
.accept("application/json")
.send({ _id: "1", name: "new", config: {} })
.expect(200);
//THEN
expect(body).toStrictEqual({
message: "Preset updated",
data: null,
});
expect(editPresetMock).toHaveBeenCalledWith(uid, {
_id: "1",
name: "new",
config: {},
});
});
it("should fail with missing mandatory properties", async () => {
//WHEN
const { body } = await mockApp
.patch("/presets")
.set("Authorization", `Bearer ${uid}`)
.accept("application/json")
.send({})
.expect(422);
expect(body).toStrictEqual({
message: "Invalid request data schema",
validationErrors: [`"_id" Required`, `"name" Required`],
});
expect(editPresetMock).not.toHaveBeenCalled();
});
// Sends a preset violating several schema rules at once (unknown enum value,
// wrong types, unrecognized keys at two levels) and checks that all
// violations are reported together in a single 422 response.
it("should fail with invalid preset", async () => {
  //WHEN
  const { body } = await mockApp
    .patch("/presets")
    .set("Authorization", `Bearer ${uid}`)
    .accept("application/json")
    .send({
      _id: "1",
      name: "update",
      extra: "extra", // unrecognized top-level key
      settingGroups: ["mappers"], // not a valid setting group
      config: {
        extra: "extra", // unrecognized nested key
        autoSwitchTheme: "yes", // wrong type: should be boolean
        confidenceMode: "pretty", // not a valid enum value
      },
    })
    .expect(422);
  //THEN
  expect(body).toStrictEqual({
    message: "Invalid request data schema",
    validationErrors: [
      `"settingGroups.0" Invalid enum value. Expected 'test' | 'behavior' | 'input' | 'sound' | 'caret' | 'appearance' | 'theme' | 'hideElements' | 'hidden' | 'ads', received 'mappers'`,
      `"config.confidenceMode" Invalid enum value. Expected 'off' | 'on' | 'max', received 'pretty'`,
      `"config.autoSwitchTheme" Expected boolean, received string`,
      `"config" Unrecognized key(s) in object: 'extra'`,
      `Unrecognized key(s) in object: 'extra'`,
    ],
  });
  expect(editPresetMock).not.toHaveBeenCalled();
});
// settingGroups must be a set — listing the same group twice is rejected
// by the schema before the DAL is reached.
it("should fail with duplicate group settings in partial preset", async () => {
  //WHEN
  const { body } = await mockApp
    .patch("/presets")
    .set("Authorization", `Bearer ${uid}`)
    .accept("application/json")
    .send({
      _id: "1",
      name: "new",
      settingGroups: ["hideElements", "hideElements"], // duplicate entry
      config: {
        showKeyTips: true,
        capsLockWarning: true,
        showOutOfFocusWarning: true,
        showAverage: "off",
      },
    })
    .expect(422);
  //THEN
  expect(body).toStrictEqual({
    message: "Invalid request data schema",
    validationErrors: [`"settingGroups" No duplicates allowed.`],
  });
  expect(editPresetMock).not.toHaveBeenCalled();
});
});
// DELETE /presets/:id — deletion of a user's preset.
// Fix: describe name said "delete config" (copy-paste from the config
// controller tests) but this suite exercises PresetDal.removePreset.
describe("delete preset", () => {
  const deletePresetMock = vi.spyOn(PresetDal, "removePreset");

  afterEach(() => {
    deletePresetMock.mockClear();
  });

  it("should delete the users preset", async () => {
    //GIVEN
    deletePresetMock.mockResolvedValue();
    //WHEN
    const { body } = await mockApp
      .delete("/presets/1")
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(body).toStrictEqual({
      message: "Preset deleted",
      data: null,
    });
    expect(deletePresetMock).toHaveBeenCalledWith(uid, "1");
  });

  it("should fail without preset _id", async () => {
    //GIVEN
    deletePresetMock.mockResolvedValue();
    //WHEN a trailing slash with no id hits no route at all
    await mockApp
      .delete("/presets/")
      .set("Authorization", `Bearer ${uid}`)
      .expect(404);
    expect(deletePresetMock).not.toHaveBeenCalled();
  });
});
});

View File

@@ -0,0 +1,81 @@
import { describe, it, expect, afterEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import * as PsaDal from "../../../src/dal/psa";
import * as Prometheus from "../../../src/utils/prometheus";
import { ObjectId } from "mongodb";
const { mockApp, uid } = setup();
// GET /psas — public service announcements. The endpoint is public (no auth
// required) and records the client version header for Prometheus metrics.
describe("Psa Controller", () => {
  describe("get psa", () => {
    const getPsaMock = vi.spyOn(PsaDal, "get");
    const recordClientVersionMock = vi.spyOn(Prometheus, "recordClientVersion");
    afterEach(() => {
      getPsaMock.mockClear();
      recordClientVersionMock.mockClear();
    });
    it("get psas without authorization", async () => {
      //GIVEN
      // NOTE(review): both fixtures use message "test2" — presumably the
      // first was meant to be "test1"; harmless since assertions match.
      const psaOne: PsaDal.DBPSA = {
        _id: new ObjectId(),
        message: "test2",
        date: 1000,
        level: 1,
        sticky: true,
      };
      const psaTwo: PsaDal.DBPSA = {
        _id: new ObjectId(),
        message: "test2",
        date: 2000,
        level: 2,
        sticky: false,
      };
      getPsaMock.mockResolvedValue([psaOne, psaTwo]);
      //WHEN
      const { body } = await mockApp.get("/psas").expect(200);
      //THEN ObjectIds are serialized to hex strings in the response
      expect(body).toEqual({
        message: "PSAs retrieved",
        data: [
          {
            _id: psaOne._id.toHexString(),
            date: 1000,
            level: 1,
            message: "test2",
            sticky: true,
          },
          {
            _id: psaTwo._id.toHexString(),
            date: 2000,
            level: 2,
            message: "test2",
            sticky: false,
          },
        ],
      });
      // no version header sent -> recorded as "unknown"
      expect(recordClientVersionMock).toHaveBeenCalledWith("unknown");
    });
    it("get psas with authorization", async () => {
      await mockApp
        .get("/psas")
        .set("Authorization", `Bearer ${uid}`)
        .expect(200);
    });
    it("get psas records x-client-version", async () => {
      await mockApp.get("/psas").set("x-client-version", "1.0").expect(200);
      expect(recordClientVersionMock).toHaveBeenCalledWith("1.0");
    });
    it("get psas records client-version", async () => {
      // legacy header name is honored as well
      await mockApp.get("/psas").set("client-version", "2.0").expect(200);
      expect(recordClientVersionMock).toHaveBeenCalledWith("2.0");
    });
  });
});

View File

@@ -0,0 +1,144 @@
import { describe, it, expect, afterEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import * as PublicDal from "../../../src/dal/public";
const { mockApp } = setup();
// Public, unauthenticated statistics endpoints: speed histogram and
// aggregate typing stats.
describe("PublicController", () => {
  describe("get speed histogram", () => {
    const getSpeedHistogramMock = vi.spyOn(PublicDal, "getSpeedHistogram");
    afterEach(() => {
      getSpeedHistogramMock.mockClear();
    });
    it("gets for english time 60", async () => {
      //GIVEN
      getSpeedHistogramMock.mockResolvedValue({ "0": 1, "10": 2 });
      //WHEN
      const { body } = await mockApp
        .get("/public/speedHistogram")
        .query({ language: "english", mode: "time", mode2: "60" })
        .expect(200);
      //THEN
      expect(body).toEqual({
        message: "Public speed histogram retrieved",
        data: { "0": 1, "10": 2 },
      });
      expect(getSpeedHistogramMock).toHaveBeenCalledWith(
        "english",
        "time",
        "60",
      );
    });
    it("gets for mode", async () => {
      // every supported mode is accepted by the query schema
      for (const mode of ["time", "words", "quote", "zen", "custom"]) {
        const response = await mockApp
          .get("/public/speedHistogram")
          .query({ language: "english", mode, mode2: "custom" });
        expect(response.status, "for mode " + mode).toEqual(200);
      }
    });
    it("gets for mode2", async () => {
      // mode2 may be a stringified number, "zen" or "custom"
      for (const mode2 of [
        "10",
        "25",
        "50",
        "100",
        "15",
        "30",
        "60",
        "120",
        "zen",
        "custom",
      ]) {
        const response = await mockApp
          .get("/public/speedHistogram")
          .query({ language: "english", mode: "words", mode2 });
        expect(response.status, "for mode2 " + mode2).toEqual(200);
      }
    });
    it("fails for missing query", async () => {
      const { body } = await mockApp.get("/public/speedHistogram").expect(422);
      expect(body).toEqual({
        message: "Invalid query schema",
        validationErrors: [
          '"language" Required',
          '"mode" Required',
          '"mode2" Needs to be either a number, "zen" or "custom".',
        ],
      });
    });
    it("fails for invalid query", async () => {
      const { body } = await mockApp
        .get("/public/speedHistogram")
        .query({
          language: "en?gli.sh",
          mode: "unknownMode",
          mode2: "unknownMode2",
        })
        .expect(422);
      expect(body).toEqual({
        message: "Invalid query schema",
        validationErrors: [
          '"language" Invalid enum value. Must be a supported language',
          `"mode" Invalid enum value. Expected 'time' | 'words' | 'quote' | 'custom' | 'zen', received 'unknownMode'`,
          '"mode2" Needs to be a number or a number represented as a string e.g. "10".',
        ],
      });
    });
    it("fails for unknown query", async () => {
      // schema is closed: unknown keys are rejected
      const { body } = await mockApp
        .get("/public/speedHistogram")
        .query({
          language: "english",
          mode: "time",
          mode2: "60",
          extra: "value",
        })
        .expect(422);
      expect(body).toEqual({
        message: "Invalid query schema",
        validationErrors: ["Unrecognized key(s) in object: 'extra'"],
      });
    });
  });
  describe("get typing stats", () => {
    const getTypingStatsMock = vi.spyOn(PublicDal, "getTypingStats");
    afterEach(() => {
      getTypingStatsMock.mockClear();
    });
    it("gets without authentication", async () => {
      //GIVEN
      getTypingStatsMock.mockResolvedValue({
        testsCompleted: 23,
        testsStarted: 42,
        timeTyping: 1000,
      } as any);
      //WHEN
      const { body } = await mockApp.get("/public/typingStats").expect(200);
      //THEN
      expect(body).toEqual({
        message: "Public typing stats retrieved",
        data: {
          testsCompleted: 23,
          testsStarted: 42,
          timeTyping: 1000,
        },
      });
    });
  });
});

View File

@@ -0,0 +1,896 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import * as Configuration from "../../../src/init/configuration";
import * as UserDal from "../../../src/dal/user";
import * as NewQuotesDal from "../../../src/dal/new-quotes";
import type { DBNewQuote } from "../../../src/dal/new-quotes";
import * as QuoteRatingsDal from "../../../src/dal/quote-ratings";
import * as ReportDal from "../../../src/dal/report";
import * as LogsDal from "../../../src/dal/logs";
import * as Captcha from "../../../src/utils/captcha";
import { ObjectId } from "mongodb";
import { ApproveQuote } from "@monkeytype/schemas/quotes";
const { mockApp, uid } = setup();
const configuration = Configuration.getCachedConfiguration();
describe("QuotesController", () => {
const getPartialUserMock = vi.spyOn(UserDal, "getPartialUser");
const logsAddLogMock = vi.spyOn(LogsDal, "addLog");
beforeEach(() => {
enableQuotes(true);
const user = { quoteMod: true, name: "Bob" } as any;
getPartialUserMock.mockClear().mockResolvedValue(user);
logsAddLogMock.mockClear().mockResolvedValue();
});
// GET /quotes — listing quote submissions. Requires the user to be a quote
// mod; quoteMod === true lists all languages, a language string limits the
// listing to that language.
describe("getQuotes", () => {
  const getQuotesMock = vi.spyOn(NewQuotesDal, "get");
  beforeEach(() => {
    getQuotesMock.mockClear();
    getQuotesMock.mockResolvedValue([]);
  });
  it("should return quotes", async () => {
    //GIVEN
    const quoteOne: DBNewQuote = {
      _id: new ObjectId(),
      text: "test",
      source: "Bob",
      language: "english",
      submittedBy: "Kevin",
      timestamp: 1000,
      approved: true,
    };
    const quoteTwo: DBNewQuote = {
      _id: new ObjectId(),
      text: "test2",
      source: "Stuart",
      language: "english",
      submittedBy: "Kevin",
      timestamp: 2000,
      approved: false,
    };
    getQuotesMock.mockResolvedValue([quoteOne, quoteTwo]);
    //WHEN
    const { body } = await mockApp
      .get("/quotes")
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN ObjectIds are serialized to hex strings
    expect(body.message).toEqual("Quote submissions retrieved");
    expect(body.data).toEqual([
      { ...quoteOne, _id: quoteOne._id.toHexString() },
      {
        ...quoteTwo,
        _id: quoteTwo._id.toHexString(),
      },
    ]);
    // quoteMod === true (from outer beforeEach) -> all languages
    expect(getQuotesMock).toHaveBeenCalledWith("all");
  });
  it("should return quotes with quoteMod", async () => {
    //GIVEN quoteMod restricted to a single language
    getPartialUserMock
      .mockClear()
      .mockResolvedValue({ quoteMod: "english" } as any);
    //WHEN
    await mockApp
      .get("/quotes")
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN
    expect(getQuotesMock).toHaveBeenCalledWith("english");
  });
  it("should fail with quoteMod false", async () => {
    //GIVEN
    getPartialUserMock
      .mockClear()
      .mockResolvedValue({ quoteMod: false } as any);
    //WHEN
    const { body } = await mockApp
      .get("/quotes")
      .set("Authorization", `Bearer ${uid}`)
      .expect(403);
    //THEN
    expect(body.message).toEqual("You don't have permission to do this.");
    expect(getQuotesMock).not.toHaveBeenCalled();
  });
  it("should fail with quoteMod empty", async () => {
    //GIVEN an empty-string quoteMod is treated as "no permission"
    getPartialUserMock.mockClear().mockResolvedValue({ quoteMod: "" } as any);
    //WHEN
    const { body } = await mockApp
      .get("/quotes")
      .set("Authorization", `Bearer ${uid}`)
      .expect(403);
    //THEN
    expect(body.message).toEqual("You don't have permission to do this.");
    expect(getQuotesMock).not.toHaveBeenCalled();
  });
  it("should fail without authentication", async () => {
    await mockApp.get("/quotes").expect(401);
  });
});
// GET /quotes/isSubmissionEnabled — public feature-flag probe.
// Fix: enableQuotes is async and was invoked as a floating promise, so the
// configuration change raced the request; it is now awaited.
describe("isSubmissionsEnabled", () => {
  it("should return for quotes enabled without authentication", async () => {
    //GIVEN
    await enableQuotes(true);
    //WHEN
    const { body } = await mockApp
      .get("/quotes/isSubmissionEnabled")
      .expect(200);
    expect(body).toEqual({
      message: "Quote submission enabled",
      data: { isEnabled: true },
    });
  });
  it("should return for quotes disabled without authentication", async () => {
    //GIVEN
    // NOTE(review): this test never disables quotes and still expects
    // isEnabled: true — the "disabled" scenario appears untested; confirm
    // intent before changing the assertion.
    //WHEN
    const { body } = await mockApp
      .get("/quotes/isSubmissionEnabled")
      .expect(200);
    expect(body).toEqual({
      message: "Quote submission enabled",
      data: { isEnabled: true },
    });
  });
});
// POST /quotes — submitting a new quote. Requires auth, a passing captcha,
// and the submissions feature flag to be enabled.
// Fixes: enableQuotes(false) was a floating promise (now awaited);
// test name typo "invalid capture" -> "invalid captcha".
describe("addQuote", () => {
  const addQuoteMock = vi.spyOn(NewQuotesDal, "add");
  const verifyCaptchaMock = vi.spyOn(Captcha, "verify");
  beforeEach(() => {
    addQuoteMock.mockClear();
    addQuoteMock.mockResolvedValue({} as any);
    verifyCaptchaMock.mockClear();
    verifyCaptchaMock.mockResolvedValue(true);
  });
  it("should add quote", async () => {
    //GIVEN a quote long enough to pass the minimum-length rule
    const newQuote = {
      text: new Array(60).fill("a").join(""),
      source: "Bob",
      language: "english",
      captcha: "captcha",
    };
    //WHEN
    const { body } = await mockApp
      .post("/quotes")
      .set("Authorization", `Bearer ${uid}`)
      .send(newQuote)
      .expect(200);
    //THEN
    expect(body).toEqual({
      message: "Quote submission added",
      data: null,
    });
    expect(addQuoteMock).toHaveBeenCalledWith(
      newQuote.text,
      newQuote.source,
      newQuote.language,
      uid,
    );
    expect(verifyCaptchaMock).toHaveBeenCalledWith(newQuote.captcha);
  });
  it("should fail without authentication", async () => {
    await mockApp.post("/quotes").expect(401);
  });
  it("should fail if feature is disabled", async () => {
    //GIVEN
    await enableQuotes(false);
    //WHEN
    const { body } = await mockApp
      .post("/quotes")
      .set("Authorization", `Bearer ${uid}`)
      .expect(503);
    //THEN
    expect(body.message).toEqual(
      "Quote submission is disabled temporarily. The queue is quite long and we need some time to catch up.",
    );
  });
  it("should fail without mandatory properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes")
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: [
        '"text" Required',
        '"source" Required',
        '"language" Required',
        '"captcha" Required',
      ],
    });
  });
  it("should fail with unknown properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes")
      .send({
        text: new Array(60).fill("a").join(""),
        source: "Bob",
        language: "english",
        captcha: "captcha",
        extra: "value",
      })
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: ["Unrecognized key(s) in object: 'extra'"],
    });
  });
  it("should fail with invalid captcha", async () => {
    //GIVEN
    verifyCaptchaMock.mockResolvedValue(false);
    //WHEN
    const { body } = await mockApp
      .post("/quotes")
      .send({
        text: new Array(60).fill("a").join(""),
        source: "Bob",
        language: "english",
        captcha: "captcha",
      })
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body.message).toEqual("Captcha check failed");
  });
});
// POST /quotes/approve — quote-mod approval, optionally editing text/source
// before the quote is published. The approving user's name ("Bob", from the
// outer beforeEach) is passed through to the DAL.
describe("approveQuote", () => {
  const approveQuoteMock = vi.spyOn(NewQuotesDal, "approve");
  beforeEach(() => {
    approveQuoteMock.mockClear();
  });
  it("should approve", async () => {
    //GIVEN
    const quoteId = new ObjectId().toHexString();
    const quote: ApproveQuote = {
      id: 100,
      text: "text",
      source: "source",
      length: 10,
      approvedBy: "Kevin",
    };
    approveQuoteMock.mockResolvedValue({
      message: "ok",
      quote,
    });
    //WHEN
    const { body } = await mockApp
      .post("/quotes/approve")
      .set("Authorization", `Bearer ${uid}`)
      .send({
        quoteId,
        editText: "editedText",
        editSource: "editedSource",
      })
      .expect(200);
    //THEN
    expect(body).toEqual({
      message: "ok",
      data: quote,
    });
    expect(approveQuoteMock).toHaveBeenCalledWith(
      quoteId,
      "editedText",
      "editedSource",
      "Bob",
    );
  });
  it("should approve with optional parameters as null", async () => {
    //GIVEN
    const quoteId = new ObjectId().toHexString();
    approveQuoteMock.mockResolvedValue({
      message: "ok",
      quote: {} as any,
    });
    //WHEN
    const { body } = await mockApp
      .post("/quotes/approve")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId, editText: null, editSource: null })
      .expect(200);
    //THEN nulls are normalized to undefined before reaching the DAL
    expect(body).toEqual({
      message: "ok",
      data: {},
    });
    expect(approveQuoteMock).toHaveBeenCalledWith(
      quoteId,
      undefined,
      undefined,
      "Bob",
    );
  });
  it("should approve without optional parameters", async () => {
    //GIVEN
    const quoteId = new ObjectId().toHexString();
    approveQuoteMock.mockResolvedValue({
      message: "ok",
      quote: {} as any,
    });
    //WHEN
    const { body } = await mockApp
      .post("/quotes/approve")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId })
      .expect(200);
    //THEN
    expect(body).toEqual({
      message: "ok",
      data: {},
    });
    expect(approveQuoteMock).toHaveBeenCalledWith(
      quoteId,
      undefined,
      undefined,
      "Bob",
    );
  });
  it("should fail without mandatory properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes/approve")
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: ['"quoteId" Required'],
    });
  });
  it("should fail with unknown properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes/approve")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId: new ObjectId().toHexString(), extra: "value" })
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: ["Unrecognized key(s) in object: 'extra'"],
    });
  });
  it("should fail if user is no quote mod", async () => {
    //GIVEN
    getPartialUserMock.mockClear().mockResolvedValue({} as any);
    //WHEN
    const { body } = await mockApp
      .post("/quotes/approve")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId: new ObjectId().toHexString() })
      .expect(403);
    //THEN
    expect(body.message).toEqual("You don't have permission to do this.");
  });
  it("should fail without authentication", async () => {
    await mockApp
      .post("/quotes/approve")
      .send({ quoteId: new ObjectId().toHexString() })
      .expect(401);
  });
});
// POST /quotes/reject — quote-mod rejection of a submission.
describe("refuseQuote", () => {
  const refuseQuoteMock = vi.spyOn(NewQuotesDal, "refuse");
  beforeEach(() => {
    refuseQuoteMock.mockClear();
    refuseQuoteMock.mockResolvedValue();
  });
  it("should refuse quote", async () => {
    //GIVEN
    const quoteId = new ObjectId().toHexString();
    //WHEN
    const { body } = await mockApp
      .post("/quotes/reject")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId })
      .expect(200);
    //THEN
    expect(body).toEqual({
      message: "Quote refused",
      data: null,
    });
    expect(refuseQuoteMock).toHaveBeenCalledWith(quoteId);
  });
  it("should fail without mandatory properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes/reject")
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: ['"quoteId" Required'],
    });
  });
  it("should fail with unknown properties", async () => {
    //GIVEN
    const quoteId = new ObjectId().toHexString();
    //WHEN
    const { body } = await mockApp
      .post("/quotes/reject")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId, extra: "value" })
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: ["Unrecognized key(s) in object: 'extra'"],
    });
  });
  it("should fail if user is no quote mod", async () => {
    //GIVEN a user without the quoteMod flag
    getPartialUserMock.mockClear().mockResolvedValue({} as any);
    const quoteId = new ObjectId().toHexString();
    //WHEN
    const { body } = await mockApp
      .post("/quotes/reject")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId })
      .expect(403);
    //THEN
    expect(body.message).toEqual("You don't have permission to do this.");
  });
  it("should fail without authentication", async () => {
    await mockApp
      .post("/quotes/reject")
      .send({ quoteId: new ObjectId().toHexString() })
      .expect(401);
  });
});
// GET /quotes/rating — fetching the aggregate rating of a single quote.
describe("getRating", () => {
  const getRatingMock = vi.spyOn(QuoteRatingsDal, "get");
  beforeEach(() => {
    getRatingMock.mockClear();
  });
  it("should get", async () => {
    //GIVEN
    const quoteRating = {
      _id: new ObjectId(),
      average: 2,
      language: "english",
      quoteId: 23,
      ratings: 100,
      totalRating: 122,
    };
    getRatingMock.mockResolvedValue(quoteRating as any);
    //WHEN
    const { body } = await mockApp
      .get("/quotes/rating")
      .query({ quoteId: 42, language: "english" })
      .set("Authorization", `Bearer ${uid}`)
      .expect(200);
    //THEN the ObjectId is serialized to a hex string
    expect(body).toEqual({
      message: "Rating retrieved",
      data: { ...quoteRating, _id: quoteRating._id.toHexString() },
    });
    expect(getRatingMock).toHaveBeenCalledWith(42, "english");
  });
  it("should fail without mandatory query parameters", async () => {
    //WHEN
    const { body } = await mockApp
      .get("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid query schema",
      validationErrors: ['"quoteId" Invalid input', '"language" Required'],
    });
  });
  it("should fail with unknown query parameters", async () => {
    //WHEN
    const { body } = await mockApp
      .get("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .query({ quoteId: 42, language: "english", extra: "value" })
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid query schema",
      validationErrors: ["Unrecognized key(s) in object: 'extra'"],
    });
  });
  it("should fail without authentication", async () => {
    await mockApp
      .get("/quotes/rating")
      .query({ quoteId: 42, language: "english" })
      .expect(401);
  });
});
// POST /quotes/rating — submitting or updating a 1-5 star rating.
// On update, the DAL receives the DELTA between the new and previous rating
// (and a flag indicating an update), while the user's stored ratings map is
// replaced with the new absolute value.
describe("submitRating", () => {
  const updateQuotesRatingsMock = vi.spyOn(UserDal, "updateQuoteRatings");
  const submitQuoteRating = vi.spyOn(QuoteRatingsDal, "submit");
  beforeEach(() => {
    getPartialUserMock
      .mockClear()
      .mockResolvedValue({ quoteRatings: null } as any);
    updateQuotesRatingsMock.mockClear().mockResolvedValue({} as any);
    submitQuoteRating.mockClear().mockResolvedValue();
  });
  it("should submit new rating", async () => {
    //GIVEN
    //WHEN
    const { body } = await mockApp
      .post("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .send({
        quoteId: 23,
        rating: 4,
        language: "english",
      })
      .expect(200);
    //THEN a first-time rating is submitted with update=false
    expect(body).toEqual({
      message: "Rating submitted",
      data: null,
    });
    expect(submitQuoteRating).toHaveBeenCalledWith(23, "english", 4, false);
    expect(updateQuotesRatingsMock).toHaveBeenCalledWith(uid, {
      english: { "23": 4 },
    });
  });
  it("should update existing rating", async () => {
    //GIVEN a previous rating of 4 for quote 23
    getPartialUserMock.mockClear().mockResolvedValue({
      quoteRatings: { german: { "4": 1 }, english: { "5": 5, "23": 4 } },
    } as any);
    //WHEN
    const { body } = await mockApp
      .post("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .send({
        quoteId: 23,
        rating: 2,
        language: "english",
      })
      .expect(200);
    //THEN delta = 2 - 4 = -2, flagged as an update
    expect(body).toEqual({
      message: "Rating updated",
      data: null,
    });
    expect(submitQuoteRating).toHaveBeenCalledWith(23, "english", -2, true);
    expect(updateQuotesRatingsMock).toHaveBeenCalledWith(uid, {
      german: { "4": 1 },
      english: { "5": 5, "23": 2 },
    });
  });
  it("should update existing rating with same rating", async () => {
    //GIVEN a previous rating equal to the new one
    getPartialUserMock.mockClear().mockResolvedValue({
      quoteRatings: { german: { "4": 1 }, english: { "5": 5, "23": 4 } },
    } as any);
    //WHEN
    const { body } = await mockApp
      .post("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .send({
        quoteId: 23,
        rating: 4,
        language: "english",
      })
      .expect(200);
    //THEN delta is 0 but the update is still recorded
    expect(body).toEqual({
      message: "Rating updated",
      data: null,
    });
    expect(submitQuoteRating).toHaveBeenCalledWith(23, "english", 0, true);
    expect(updateQuotesRatingsMock).toHaveBeenCalledWith(uid, {
      german: { "4": 1 },
      english: { "5": 5, "23": 4 },
    });
  });
  it("should fail with missing mandatory parameter", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: [
        '"quoteId" Invalid input',
        '"language" Required',
        '"rating" Required',
      ],
    });
  });
  it("should fail with unknown parameter", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId: 23, language: "english", rating: 5, extra: "value" })
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: ["Unrecognized key(s) in object: 'extra'"],
    });
  });
  it("should fail with zero rating", async () => {
    //WHEN rating is below the 1-5 range
    const { body } = await mockApp
      .post("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId: 23, language: "english", rating: 0 })
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: [
        '"rating" Number must be greater than or equal to 1',
      ],
    });
  });
  it("should fail with rating bigger than 5", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId: 23, language: "english", rating: 6 })
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: ['"rating" Number must be less than or equal to 5'],
    });
  });
  it("should fail with non-integer rating", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes/rating")
      .set("Authorization", `Bearer ${uid}`)
      .send({ quoteId: 23, language: "english", rating: 2.5 })
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: ['"rating" Expected integer, received float'],
    });
  });
  it("should fail without authentication", async () => {
    await mockApp.post("/quotes/rating").expect(401);
  });
});
// POST /quotes/report — reporting a quote. Requires the reporting feature
// flag, a passing captcha, and a user with canReport not set to false.
// Fixes: enableQuoteReporting is async and was invoked as a floating promise
// in beforeEach and in the "feature disabled" test (now awaited); the
// happy-path test had its `.expect(200)` commented out, leaving a silent
// status check gap — re-enabled, since the body assertions already require
// the success payload.
describe("reportQuote", () => {
  const verifyCaptchaMock = vi.spyOn(Captcha, "verify");
  const createReportMock = vi.spyOn(ReportDal, "createReport");
  beforeEach(async () => {
    await enableQuoteReporting(true);
    verifyCaptchaMock.mockClear().mockResolvedValue(true);
    createReportMock.mockClear().mockResolvedValue();
  });
  it("should report quote", async () => {
    //GIVEN
    //WHEN
    const { body } = await mockApp
      .post("/quotes/report")
      .set("Authorization", `Bearer ${uid}`)
      .send({
        quoteId: "23", //quoteId is string on this endpoint
        quoteLanguage: "english",
        reason: "Inappropriate content",
        comment: "I don't like this.",
        captcha: "captcha",
      })
      .expect(200);
    //THEN
    expect(body).toEqual({
      message: "Quote reported",
      data: null,
    });
    expect(verifyCaptchaMock).toHaveBeenCalledWith("captcha");
    expect(createReportMock).toHaveBeenCalledWith(
      expect.objectContaining({
        type: "quote",
        uid,
        contentId: "english-23",
        reason: "Inappropriate content",
        comment: "I don't like this.",
      }),
      10, //configuration maxReport
      20, //configuration contentReportLimit
    );
  });
  it("should report quote without comment", async () => {
    await mockApp
      .post("/quotes/report")
      .set("Authorization", `Bearer ${uid}`)
      .send({
        quoteId: "23", //quoteId is string on this endpoint
        quoteLanguage: "english",
        reason: "Inappropriate content",
        captcha: "captcha",
      })
      .expect(200);
  });
  it("should report quote with empty comment", async () => {
    await mockApp
      .post("/quotes/report")
      .set("Authorization", `Bearer ${uid}`)
      .send({
        quoteId: "23", //quoteId is string on this endpoint
        quoteLanguage: "english",
        reason: "Inappropriate content",
        comment: "",
        captcha: "captcha",
      })
      .expect(200);
  });
  it("should fail without mandatory properties", async () => {
    //WHEN
    const { body } = await mockApp
      .post("/quotes/report")
      .set("Authorization", `Bearer ${uid}`)
      .expect(422);
    //THEN
    expect(body).toEqual({
      message: "Invalid request data schema",
      validationErrors: [
        '"quoteId" Invalid input',
        '"quoteLanguage" Required',
        '"reason" Required',
        '"captcha" Required',
      ],
    });
  });
  it("should fail if feature is disabled", async () => {
    //GIVEN
    await enableQuoteReporting(false);
    //WHEN
    const { body } = await mockApp
      .post("/quotes/report")
      .set("Authorization", `Bearer ${uid}`)
      .expect(503);
    //THEN
    expect(body.message).toEqual("Quote reporting is unavailable.");
  });
  it("should fail if user cannot report", async () => {
    //GIVEN
    getPartialUserMock
      .mockClear()
      .mockResolvedValue({ canReport: false } as any);
    //WHEN
    const { body } = await mockApp
      .post("/quotes/report")
      .set("Authorization", `Bearer ${uid}`)
      .expect(403);
    //THEN
    expect(body.message).toEqual("You don't have permission to do this.");
  });
});
});
/**
 * Toggles the quote-submissions feature flag on the cached configuration.
 *
 * Mutates the shared object returned by the module-level `configuration`
 * promise, then re-mocks getCachedConfiguration to resolve with it — so the
 * change leaks across tests until overwritten by the next call.
 * Async: callers must `await` it, or the flag may not be set before the
 * request under test runs.
 *
 * @param enabled whether quote submissions should be reported as enabled
 */
async function enableQuotes(enabled: boolean): Promise<void> {
  const mockConfig = await configuration;
  mockConfig.quotes = { ...mockConfig.quotes, submissionsEnabled: enabled };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(
    mockConfig,
  );
}
/**
 * Toggles the quote-reporting feature flag on the cached configuration and
 * pins maxReports=10 / contentReportLimit=20 (asserted by the reportQuote
 * tests).
 *
 * Same caveats as enableQuotes: mutates the shared cached config object and
 * must be awaited by callers to avoid a race with the request under test.
 *
 * @param enabled whether quote reporting should be reported as enabled
 */
async function enableQuoteReporting(enabled: boolean): Promise<void> {
  const mockConfig = await configuration;
  mockConfig.quotes.reporting = {
    ...mockConfig.quotes.reporting,
    enabled,
    maxReports: 10,
    contentReportLimit: 20,
  };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(
    mockConfig,
  );
}

View File

@@ -0,0 +1,891 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import * as Configuration from "../../../src/init/configuration";
import * as ResultDal from "../../../src/dal/result";
import * as UserDal from "../../../src/dal/user";
import * as LogsDal from "../../../src/dal/logs";
import * as PublicDal from "../../../src/dal/public";
import { ObjectId } from "mongodb";
import { mockAuthenticateWithApeKey } from "../../__testData__/auth";
import { enableRateLimitExpects } from "../../__testData__/rate-limit";
import { DBResult } from "../../../src/utils/result";
import { omit } from "../../../src/utils/misc";
import { CompletedEvent } from "@monkeytype/schemas/results";
const { mockApp, uid, mockAuth } = setup();
const configuration = Configuration.getCachedConfiguration();
enableRateLimitExpects();
describe("result controller test", () => {
describe("getResults", () => {
const resultMock = vi.spyOn(ResultDal, "getResults");
beforeEach(async () => {
resultMock.mockResolvedValue([]);
await enablePremiumFeatures(true);
vi.spyOn(UserDal, "checkIfUserIsPremium").mockResolvedValue(false);
});
afterEach(() => {
resultMock.mockClear();
});
it("should get results", async () => {
//GIVEN
const resultOne = givenDbResult(uid);
const resultTwo = givenDbResult(uid);
resultMock.mockResolvedValue([resultOne, resultTwo]);
//WHEN
const { body } = await mockApp
.get("/results")
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(200);
//THEN
expect(body.message).toEqual("Results retrieved");
expect(body.data).toEqual([
{ ...resultOne, _id: resultOne._id.toHexString() },
{ ...resultTwo, _id: resultTwo._id.toHexString() },
]);
});
it("should get results with ape key", async () => {
//GIVEN
await acceptApeKeys(true);
const apeKey = await mockAuthenticateWithApeKey(uid, await configuration);
//WHEN
await mockApp
.get("/results")
.set("Authorization", `ApeKey ${apeKey}`)
.send()
.expect(200);
});
it("should get latest 1000 results for regular user", async () => {
//WHEN
await mockApp
.get("/results")
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(200);
//THEN
expect(resultMock).toHaveBeenCalledWith(uid, {
limit: 1000,
offset: 0,
onOrAfterTimestamp: NaN,
});
});
it("should get results filter by onOrAfterTimestamp", async () => {
//GIVEN
const now = Date.now();
//WHEN
await mockApp
.get("/results")
.query({ onOrAfterTimestamp: now })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(200);
//THEN
expect(resultMock).toHaveBeenCalledWith(uid, {
limit: 1000,
offset: 0,
onOrAfterTimestamp: now,
});
});
it("should get with limit and offset", async () => {
//WHEN
await mockApp
.get("/results")
.query({ limit: 250, offset: 500 })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(200);
//THEN
expect(resultMock).toHaveBeenCalledWith(uid, {
limit: 250,
offset: 500,
onOrAfterTimestamp: NaN,
});
});
it("should fail exceeding max limit for regular user", async () => {
//WHEN
const { body } = await mockApp
.get("/results")
.query({ limit: 100, offset: 1000 })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(422);
//THEN
expect(body.message).toEqual(
`Max results limit of ${
(await configuration).results.limits.regularUser
} exceeded.`,
);
});
it("should get with higher max limit for premium user", async () => {
//GIVEN
vi.spyOn(UserDal, "checkIfUserIsPremium").mockResolvedValue(true);
//WHEN
await mockApp
.get("/results")
.query({ limit: 800, offset: 600 })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(200);
//THEN
expect(resultMock).toHaveBeenCalledWith(uid, {
limit: 800,
offset: 600,
onOrAfterTimestamp: NaN,
});
});
it("should get results if offset/limit is partly outside the max limit", async () => {
//WHEN
await mockApp
.get("/results")
.query({ limit: 20, offset: 990 })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(200);
//THEN
expect(resultMock).toHaveBeenCalledWith(uid, {
limit: 10, //limit is reduced to stay within max limit
offset: 990,
onOrAfterTimestamp: NaN,
});
});
it("should fail exceeding 1k limit", async () => {
//GIVEN
vi.spyOn(UserDal, "checkIfUserIsPremium").mockResolvedValue(false);
//WHEN
const { body } = await mockApp
.get("/results")
.query({ limit: 2000 })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(422);
//THEN
expect(body).toEqual({
message: "Invalid query schema",
validationErrors: ['"limit" Number must be less than or equal to 1000'],
});
});
it("should fail exceeding maxlimit for premium user", async () => {
//GIVEN
vi.spyOn(UserDal, "checkIfUserIsPremium").mockResolvedValue(true);
//WHEN
const { body } = await mockApp
.get("/results")
.query({ limit: 1000, offset: 25000 })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(422);
//THEN
expect(body.message).toEqual(
`Max results limit of ${
(await configuration).results.limits.premiumUser
} exceeded.`,
);
});
it("should get results within regular limits for premium users even if premium is globally disabled", async () => {
//GIVEN
vi.spyOn(UserDal, "checkIfUserIsPremium").mockResolvedValue(true);
enablePremiumFeatures(false);
//WHEN
await mockApp
.get("/results")
.query({ limit: 100, offset: 900 })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(200);
//THEN
expect(resultMock).toHaveBeenCalledWith(uid, {
limit: 100,
offset: 900,
onOrAfterTimestamp: NaN,
});
});
it("should fail exceeding max limit for premium user if premium is globally disabled", async () => {
//GIVEN
vi.spyOn(UserDal, "checkIfUserIsPremium").mockResolvedValue(true);
enablePremiumFeatures(false);
//WHEN
const { body } = await mockApp
.get("/results")
.query({ limit: 200, offset: 900 })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(503);
//THEN
expect(body.message).toEqual("Premium feature disabled.");
});
it("should get results with regular limit as default for premium users if premium is globally disabled", async () => {
//GIVEN
vi.spyOn(UserDal, "checkIfUserIsPremium").mockResolvedValue(true);
enablePremiumFeatures(false);
//WHEN
await mockApp
.get("/results")
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(200);
//THEN
expect(resultMock).toHaveBeenCalledWith(uid, {
limit: 1000, //the default limit for regular users
offset: 0,
onOrAfterTimestamp: NaN,
});
});
it("should fail with unknown query parameters", async () => {
//WHEN
const { body } = await mockApp
.get("/results")
.query({ extra: "value" })
.set("Authorization", `Bearer ${uid}`)
.send()
.expect(422);
//THEN
expect(body).toEqual({
message: "Invalid query schema",
validationErrors: ["Unrecognized key(s) in object: 'extra'"],
});
});
it("should be rate limited", async () => {
await expect(
mockApp.get("/results").set("Authorization", `Bearer ${uid}`),
).toBeRateLimited({ max: 60, windowMs: 60 * 60 * 1000 });
});
it("should be rate limited for ape keys", async () => {
//GIVEN
await acceptApeKeys(true);
const apeKey = await mockAuthenticateWithApeKey(uid, await configuration);
//WHEN
await expect(
mockApp.get("/results").set("Authorization", `ApeKey ${apeKey}`),
).toBeRateLimited({ max: 30, windowMs: 24 * 60 * 60 * 1000 });
});
});
//Tests for GET /results/id/:id
describe("getResultById", () => {
  const getResultMock = vi.spyOn(ResultDal, "getResult");
  afterEach(() => {
    getResultMock.mockClear();
  });
  it("should get result", async () => {
    //GIVEN
    const result = givenDbResult(uid);
    getResultMock.mockResolvedValue(result);
    //WHEN
    const { body } = await mockApp
      .get(`/results/id/${result._id}`)
      .set("Authorization", `Bearer ${uid}`)
      .send()
      .expect(200);
    //THEN
    expect(body.message).toEqual("Result retrieved");
    //the ObjectId is serialized to its hex string in the response body
    expect(body.data).toEqual({ ...result, _id: result._id.toHexString() });
  });
  //renamed from "should get last result with ape key": this suite targets
  //GET /results/id/:id, not GET /results/last
  it("should get result with ape key", async () => {
    //GIVEN
    await acceptApeKeys(true);
    const apeKey = await mockAuthenticateWithApeKey(uid, await configuration);
    const result = givenDbResult(uid);
    getResultMock.mockResolvedValue(result);
    //WHEN
    await mockApp
      .get(`/results/id/${result._id}`)
      .set("Authorization", `ApeKey ${apeKey}`)
      .send()
      .expect(200);
  });
  it("should rate limit get result with ape key", async () => {
    //GIVEN
    const result = givenDbResult(uid, {
      charStats: undefined,
      incorrectChars: 5,
      correctChars: 12,
    });
    getResultMock.mockResolvedValue(result);
    await acceptApeKeys(true);
    const apeKey = await mockAuthenticateWithApeKey(uid, await configuration);
    //WHEN
    await expect(
      mockApp
        .get(`/results/id/${result._id}`)
        .set("Authorization", `ApeKey ${apeKey}`),
    ).toBeRateLimited({ max: 60, windowMs: 60 * 60 * 1000 });
  });
});
//Tests for GET /results/last
describe("getLastResult", () => {
  const getLastResultMock = vi.spyOn(ResultDal, "getLastResult");
  afterEach(() => {
    getLastResultMock.mockClear();
  });
  it("should get last result", async () => {
    //GIVEN
    const result = givenDbResult(uid);
    getLastResultMock.mockResolvedValue(result);
    //WHEN
    const { body } = await mockApp
      .get("/results/last")
      .set("Authorization", `Bearer ${uid}`)
      .send()
      .expect(200);
    //THEN
    expect(body.message).toEqual("Result retrieved");
    //the ObjectId is serialized to its hex string in the response body
    expect(body.data).toEqual({ ...result, _id: result._id.toHexString() });
  });
  it("should get last result with ape key", async () => {
    //GIVEN
    await acceptApeKeys(true);
    const apeKey = await mockAuthenticateWithApeKey(uid, await configuration);
    const result = givenDbResult(uid);
    getLastResultMock.mockResolvedValue(result);
    //WHEN
    await mockApp
      .get("/results/last")
      .set("Authorization", `ApeKey ${apeKey}`)
      .send()
      .expect(200);
  });
  it("should rate limit get last result with ape key", async () => {
    //GIVEN
    const result = givenDbResult(uid, {
      charStats: undefined,
      incorrectChars: 5,
      correctChars: 12,
    });
    getLastResultMock.mockResolvedValue(result);
    await acceptApeKeys(true);
    const apeKey = await mockAuthenticateWithApeKey(uid, await configuration);
    //WHEN
    await expect(
      mockApp.get("/results/last").set("Authorization", `ApeKey ${apeKey}`),
    ).toBeRateLimited({ max: 30, windowMs: 60 * 1000 }); //should use defaultApeRateLimit
  });
});
//Tests for DELETE /results
describe("deleteAll", () => {
  const deleteAllMock = vi.spyOn(ResultDal, "deleteAll");
  const logToDbMock = vi.spyOn(LogsDal, "addLog");
  afterEach(() => {
    deleteAllMock.mockClear();
    logToDbMock.mockClear();
  });
  it("should delete", async () => {
    //GIVEN
    //deleting all results requires a freshly issued token (iat close to now)
    mockAuth.modifyToken({ iat: Date.now() - 1000 });
    deleteAllMock.mockResolvedValue(undefined as any);
    //WHEN
    const { body } = await mockApp
      .delete("/results")
      .set("Authorization", `Bearer ${uid}`)
      .send()
      .expect(200);
    //THEN
    expect(body.message).toEqual("All results deleted");
    expect(body.data).toBeNull();
    expect(deleteAllMock).toHaveBeenCalledWith(uid);
    //the deletion is audit-logged for the user
    expect(logToDbMock).toHaveBeenCalledWith("user_results_deleted", "", uid);
  });
  it("should fail to delete with non-fresh token", async () => {
    //GIVEN
    //iat of 0 makes the token stale, which the endpoint must reject
    mockAuth.modifyToken({ iat: 0 });
    //WHEN/THEN
    await mockApp
      .delete("/results")
      .set("Authorization", `Bearer ${uid}`)
      .send()
      .expect(401);
  });
});
describe("updateTags", () => {
const getResultMock = vi.spyOn(ResultDal, "getResult");
const updateTagsMock = vi.spyOn(ResultDal, "updateTags");
const getUserPartialMock = vi.spyOn(UserDal, "getPartialUser");
const checkIfTagPbMock = vi.spyOn(UserDal, "checkIfTagPb");
afterEach(() => {
[
getResultMock,
updateTagsMock,
getUserPartialMock,
checkIfTagPbMock,
].forEach((it) => it.mockClear());
});
it("should update tags", async () => {
//GIVEN
const result = givenDbResult(uid);
const resultIdString = result._id.toHexString();
const tagIds = [
new ObjectId().toHexString(),
new ObjectId().toHexString(),
];
const partialUser = { tags: [] };
getResultMock.mockResolvedValue(result);
updateTagsMock.mockResolvedValue({} as any);
getUserPartialMock.mockResolvedValue(partialUser as any);
checkIfTagPbMock.mockResolvedValue([]);
//WHEN
const { body } = await mockApp
.patch("/results/tags")
.set("Authorization", `Bearer ${uid}`)
.send({ resultId: resultIdString, tagIds })
.expect(200);
//THEN
expect(body.message).toEqual("Result tags updated");
expect(body.data).toEqual({
tagPbs: [],
});
expect(updateTagsMock).toHaveBeenCalledWith(uid, resultIdString, tagIds);
expect(getResultMock).toHaveBeenCalledWith(uid, resultIdString);
expect(getUserPartialMock).toHaveBeenCalledWith(uid, "update tags", [
"tags",
]);
expect(checkIfTagPbMock).toHaveBeenCalledWith(uid, partialUser, result);
});
it("should apply defaults on missing data", async () => {
//GIVEN
const result = givenDbResult(uid);
const partialResult = omit(result, [
"difficulty",
"language",
"funbox",
"lazyMode",
"punctuation",
"numbers",
]);
const resultIdString = result._id.toHexString();
const tagIds = [
new ObjectId().toHexString(),
new ObjectId().toHexString(),
];
const partialUser = { tags: [] };
getResultMock.mockResolvedValue(partialResult);
updateTagsMock.mockResolvedValue({} as any);
getUserPartialMock.mockResolvedValue(partialUser as any);
checkIfTagPbMock.mockResolvedValue([]);
//WHEN
const { body } = await mockApp
.patch("/results/tags")
.set("Authorization", `Bearer ${uid}`)
.send({ resultId: resultIdString, tagIds })
.expect(200);
//THEN
expect(body.message).toEqual("Result tags updated");
expect(body.data).toEqual({
tagPbs: [],
});
expect(updateTagsMock).toHaveBeenCalledWith(uid, resultIdString, tagIds);
expect(getResultMock).toHaveBeenCalledWith(uid, resultIdString);
expect(getUserPartialMock).toHaveBeenCalledWith(uid, "update tags", [
"tags",
]);
expect(checkIfTagPbMock).toHaveBeenCalledWith(uid, partialUser, {
...result,
difficulty: "normal",
language: "english",
funbox: [],
lazyMode: false,
punctuation: false,
numbers: false,
});
});
it("should fail with missing mandatory properties", async () => {
//GIVEN
//WHEN
const { body } = await mockApp
.patch("/results/tags")
.set("Authorization", `Bearer ${uid}`)
.send({})
.expect(422);
//THEN
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: ['"tagIds" Required', '"resultId" Required'],
});
});
it("should fail with unknown properties", async () => {
//GIVEN
//WHEN
const { body } = await mockApp
.patch("/results/tags")
.set("Authorization", `Bearer ${uid}`)
.send({ extra: "value" })
.expect(422);
//THEN
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: [
'"tagIds" Required',
'"resultId" Required',
"Unrecognized key(s) in object: 'extra'",
],
});
});
});
describe("addResult", () => {
//TODO improve test coverage for addResult
const insertedId = new ObjectId();
const userGetMock = vi.spyOn(UserDal, "getUser");
const userUpdateStreakMock = vi.spyOn(UserDal, "updateStreak");
const userCheckIfTagPbMock = vi.spyOn(UserDal, "checkIfTagPb");
const userCheckIfPbMock = vi.spyOn(UserDal, "checkIfPb");
const userIncrementXpMock = vi.spyOn(UserDal, "incrementXp");
const userUpdateTypingStatsMock = vi.spyOn(UserDal, "updateTypingStats");
const resultAddMock = vi.spyOn(ResultDal, "addResult");
const publicUpdateStatsMock = vi.spyOn(PublicDal, "updateStats");
beforeEach(async () => {
await enableResultsSaving(true);
await enableUsersXpGain(true);
[
userGetMock,
userUpdateStreakMock,
userCheckIfTagPbMock,
userCheckIfPbMock,
userIncrementXpMock,
userUpdateTypingStatsMock,
resultAddMock,
publicUpdateStatsMock,
].forEach((it) => it.mockClear());
userGetMock.mockResolvedValue({ name: "bob" } as any);
userUpdateStreakMock.mockResolvedValue(0);
userCheckIfTagPbMock.mockResolvedValue([]);
userCheckIfPbMock.mockResolvedValue(true);
resultAddMock.mockResolvedValue({ insertedId });
userIncrementXpMock.mockResolvedValue();
});
it("should add result", async () => {
//GIVEN
const completedEvent = buildCompletedEvent({
funbox: ["58008", "read_ahead_hard"],
});
//WHEN
const { body } = await mockApp
.post("/results")
.set("Authorization", `Bearer ${uid}`)
.send({
result: completedEvent,
})
.expect(200);
expect(body.message).toEqual("Result saved");
expect(body.data).toEqual({
isPb: true,
tagPbs: [],
xp: 0,
dailyXpBonus: false,
xpBreakdown: {
accPenalty: 28,
base: 20,
incomplete: 5,
funbox: 80,
},
streak: 0,
insertedId: insertedId.toHexString(),
});
expect(resultAddMock).toHaveBeenCalledWith(
uid,
expect.objectContaining({
acc: 86,
afkDuration: 5,
charStats: [100, 2, 3, 5],
chartData: {
err: [0, 2, 0],
burst: [50, 55, 56],
wpm: [1, 2, 3],
},
consistency: 23.5,
incompleteTestSeconds: 2,
isPb: true,
keyConsistency: 12,
keyDurationStats: {
average: 2.67,
sd: 2.05,
},
keySpacingStats: {
average: 2,
sd: 1.63,
},
mode: "time",
mode2: "15",
name: "bob",
rawWpm: 99,
restartCount: 4,
tags: ["tagOneId", "tagTwoId"],
testDuration: 15.1,
uid: uid,
wpm: 80,
}),
);
expect(publicUpdateStatsMock).toHaveBeenCalledWith(
4,
15.1 + 2 - 5, //duration + incompleteTestSeconds-afk
);
expect(userIncrementXpMock).toHaveBeenCalledWith(uid, 0);
expect(userUpdateTypingStatsMock).toHaveBeenCalledWith(
uid,
4,
15.1 + 2 - 5, //duration + incompleteTestSeconds-afk
);
});
it("should fail if result saving is disabled", async () => {
//GIVEN
await enableResultsSaving(false);
//WHEN
const { body } = await mockApp
.post("/results")
.set("Authorization", `Bearer ${uid}`)
.send({})
.expect(503);
//THEN
expect(body.message).toEqual("Results are not being saved at this time.");
});
it("should fail without mandatory properties", async () => {
//GIVEN
//WHEN
const { body } = await mockApp
.post("/results")
.set("Authorization", `Bearer ${uid}`)
.send({})
.expect(422);
//THEN
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: ['"result" Required'],
});
});
it("should fail with unknown properties", async () => {
//GIVEN
//WHEN
const { body } = await mockApp
.post("/results")
.set("Authorization", `Bearer ${uid}`)
.send({
result: buildCompletedEvent({
extra2: "value",
} as any),
extra: "value",
})
.expect(422);
//THEN
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: [
`"result" Unrecognized key(s) in object: 'extra2'`,
"Unrecognized key(s) in object: 'extra'",
],
});
});
//fixed typo in test name: "wit" -> "with"
it("should fail with duplicate funboxes", async () => {
  //GIVEN
  //WHEN
  const { body } = await mockApp
    .post("/results")
    .set("Authorization", `Bearer ${uid}`)
    .send({
      result: buildCompletedEvent({
        funbox: ["58008", "58008"],
      }),
    })
    .expect(400);
  //THEN
  expect(body.message).toEqual("Duplicate funboxes");
});
// it("should fail invalid properties ", async () => {
//GIVEN
//WHEN
// const { body } = await mockApp
// .post("/results")
// .set("Authorization", `Bearer ${uid}`)
// //TODO add all properties
// .send({ result: { acc: 25 } })
// .expect(422);
//THEN
/*
expect(body).toEqual({
message: "Invalid request data schema",
validationErrors: [
],
});
*/
// });
});
});
/**
 * Builds a complete, schema-valid CompletedEvent payload for POST /results tests.
 * Every required property gets a fixed default value; pass `result` to override
 * or extend individual fields for a specific test case.
 */
function buildCompletedEvent(result?: Partial<CompletedEvent>): CompletedEvent {
  return {
    acc: 86,
    afkDuration: 5,
    bailedOut: false,
    blindMode: false,
    charStats: [100, 2, 3, 5],
    chartData: { wpm: [1, 2, 3], burst: [50, 55, 56], err: [0, 2, 0] },
    consistency: 23.5,
    difficulty: "normal",
    funbox: [],
    hash: "hash",
    incompleteTestSeconds: 2,
    incompleteTests: [{ acc: 75, seconds: 10 }],
    keyConsistency: 12,
    keyDuration: [0, 3, 5],
    keySpacing: [0, 2, 4],
    language: "english",
    lazyMode: false,
    mode: "time",
    mode2: "15",
    numbers: false,
    punctuation: false,
    rawWpm: 99,
    restartCount: 4,
    tags: ["tagOneId", "tagTwoId"],
    testDuration: 15.1,
    timestamp: 1000,
    uid,
    wpmConsistency: 55,
    wpm: 80,
    stopOnLetter: false,
    //new required
    charTotal: 5,
    keyOverlap: 7,
    lastKeyToEnd: 9,
    startToFirstKey: 11,
    //caller overrides are spread last so they take precedence over the defaults
    ...result,
  };
}
/** Toggles the premium feature flag on the cached server configuration. */
async function enablePremiumFeatures(enabled: boolean): Promise<void> {
  const config = await configuration;
  config.users.premium = { ...config.users.premium, enabled };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(config);
}
/**
 * Creates a randomized result document shaped like a DB record.
 * @param uid owner of the result
 * @param customize optional overrides applied on top of the generated values
 */
function givenDbResult(uid: string, customize?: Partial<DBResult>): DBResult {
  return {
    _id: new ObjectId(),
    wpm: Math.random() * 100,
    rawWpm: Math.random() * 100,
    charStats: [
      Math.round(Math.random() * 10),
      Math.round(Math.random() * 10),
      Math.round(Math.random() * 10),
      Math.round(Math.random() * 10),
    ],
    acc: 80 + Math.random() * 20, //min accuracy is 75%
    mode: "time",
    mode2: "60",
    timestamp: Math.round(Math.random() * 100),
    testDuration: 1 + Math.random() * 100,
    consistency: Math.random() * 100,
    keyConsistency: Math.random() * 100,
    uid,
    keySpacingStats: { average: Math.random() * 100, sd: Math.random() },
    keyDurationStats: { average: Math.random() * 100, sd: Math.random() },
    isPb: true,
    chartData: {
      wpm: [Math.random() * 100],
      burst: [Math.random() * 100],
      err: [Math.random() * 100],
    },
    name: "testName",
    //overrides are spread last so they take precedence over the generated values
    ...customize,
  };
}
/** Enables or disables ApeKey authentication in the cached configuration. */
async function acceptApeKeys(enabled: boolean): Promise<void> {
  const config = await configuration;
  config.apeKeys = {
    ...config.apeKeys,
    acceptKeys: enabled,
  };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(config);
}
/** Toggles whether results are saved, on the cached configuration. */
async function enableResultsSaving(enabled: boolean): Promise<void> {
  const config = await configuration;
  config.results = { ...config.results, savingEnabled: enabled };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(config);
}
/** Toggles user XP gain (with funboxBonus fixed at 1) on the cached configuration. */
async function enableUsersXpGain(enabled: boolean): Promise<void> {
  const config = await configuration;
  config.users.xp = { ...config.users.xp, enabled, funboxBonus: 1 };
  vi.spyOn(Configuration, "getCachedConfiguration").mockResolvedValue(config);
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,81 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import { setup } from "../../__testData__/controller-test";
import GeorgeQueue from "../../../src/queues/george-queue";
import crypto from "crypto";
const { mockApp } = setup();
//Tests for the GitHub release webhook endpoint
describe("WebhooksController", () => {
  describe("githubRelease", () => {
    const georgeSendReleaseAnnouncementMock = vi.spyOn(
      GeorgeQueue,
      "sendReleaseAnnouncement",
    );
    //signature comparison is stubbed to pass by default; individual tests
    //assert the buffers it was called with
    const timingSafeEqualMock = vi.spyOn(crypto, "timingSafeEqual");
    beforeEach(() => {
      vi.stubEnv("GITHUB_WEBHOOK_SECRET", "GITHUB_WEBHOOK_SECRET");
      georgeSendReleaseAnnouncementMock.mockClear();
      timingSafeEqualMock.mockClear().mockReturnValue(true);
    });
    it("should announce release", async () => {
      //WHEN
      const { body } = await mockApp
        .post("/webhooks/githubRelease")
        .set("x-hub-signature-256", "the-signature")
        .send({ action: "published", release: { id: 1 } })
        .expect(200);
      //THEN
      expect(body).toEqual({
        message: "Added release announcement task to queue",
        data: null,
      });
      //release id is forwarded to the queue as a string
      expect(georgeSendReleaseAnnouncementMock).toHaveBeenCalledWith("1");
      expect(timingSafeEqualMock).toHaveBeenCalledWith(
        Buffer.from(
          "sha256=ff0f3080539e9df19153f6b5b5780f66e558d61038e6cf5ecf4efdc7266a7751",
        ),
        Buffer.from("the-signature"),
      );
    });
    it("should ignore non-published actions", async () => {
      //WHEN
      const { body } = await mockApp
        .post("/webhooks/githubRelease")
        .set("x-hub-signature-256", "the-signature")
        .send({ action: "created" })
        .expect(200);
      //THEN
      expect(body.message).toEqual("No action taken");
      expect(georgeSendReleaseAnnouncementMock).not.toHaveBeenCalled();
    });
    it("should ignore additional properties", async () => {
      //WHEN
      await mockApp
        .post("/webhooks/githubRelease")
        .set("x-hub-signature-256", "the-signature")
        .send({
          action: "published",
          extra: "value",
          release: { id: 1, extra2: "value" },
        })
        .expect(200);
    });
    it("should fail with missing releaseId", async () => {
      //WHEN
      const { body } = await mockApp
        .post("/webhooks/githubRelease")
        .set("x-hub-signature-256", "the-signature")
        .send({ action: "published" })
        .expect(422);
      //THEN
      expect(body.message).toEqual('Missing property "release.id".');
    });
  });
});

View File

@@ -0,0 +1,53 @@
import { describe, it, expect } from "vitest";
import * as Configurations from "../../src/init/configuration";
import { Configuration } from "@monkeytype/schemas/configuration";
const mergeConfigurations = Configurations.__testing.mergeConfigurations;
describe("configurations", () => {
  describe("mergeConfigurations", () => {
    it("should merge configurations correctly", () => {
      //GIVEN
      const baseConfig: Configuration = {
        maintenance: false,
        dev: {
          responseSlowdownMs: 5,
        },
        quotes: {
          reporting: {
            enabled: false,
            maxReports: 5,
          },
          submissionEnabled: true,
        },
      } as any;
      //partial live config: only some nested keys are present
      const liveConfig: Partial<Configuration> = {
        maintenance: true,
        quotes: {
          reporting: {
            enabled: true,
          } as any,
          maxFavorites: 10,
        } as any,
      };
      //WHEN
      //mergeConfigurations mutates baseConfig in place
      mergeConfigurations(baseConfig, liveConfig);
      //THEN
      //live values override base values; base keys missing from live are kept
      expect(baseConfig).toEqual({
        maintenance: true,
        dev: {
          responseSlowdownMs: 5,
        },
        quotes: {
          reporting: {
            enabled: true,
            maxReports: 5,
          },
          submissionEnabled: true,
        },
      } as any);
    });
  });
});

View File

@@ -0,0 +1,580 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import * as AuthUtils from "../../src/utils/auth";
import * as Auth from "../../src/middlewares/auth";
import { DecodedIdToken } from "firebase-admin/auth";
import { NextFunction, Request, Response } from "express";
import { getCachedConfiguration } from "../../src/init/configuration";
import * as ApeKeys from "../../src/dal/ape-keys";
import { ObjectId } from "mongodb";
import { hashSync } from "bcrypt";
import MonkeyError from "../../src/utils/error";
import * as Misc from "../../src/utils/misc";
import crypto from "crypto";
import {
EndpointMetadata,
RequestAuthenticationOptions,
} from "@monkeytype/contracts/util/api";
import * as Prometheus from "../../src/utils/prometheus";
import { TsRestRequestWithContext } from "../../src/api/types";
import { enableMonkeyErrorExpects } from "../__testData__/monkey-error";
enableMonkeyErrorExpects();
const mockDecodedToken: DecodedIdToken = {
uid: "123456789",
email: "newuser@mail.com",
iat: 0,
} as DecodedIdToken;
vi.spyOn(AuthUtils, "verifyIdToken").mockResolvedValue(mockDecodedToken);
const mockApeKey = {
_id: new ObjectId(),
uid: "123",
name: "test",
hash: hashSync("key", 5),
createdOn: Date.now(),
modifiedOn: Date.now(),
lastUsedOn: Date.now(),
useCount: 0,
enabled: true,
};
vi.spyOn(ApeKeys, "getApeKey").mockResolvedValue(mockApeKey);
vi.spyOn(ApeKeys, "updateLastUsedOn").mockResolvedValue();
const isDevModeMock = vi.spyOn(Misc, "isDevEnvironment");
let mockRequest: Partial<TsRestRequestWithContext>;
let mockResponse: Partial<Response>;
let nextFunction: NextFunction;
describe("middlewares/auth", () => {
beforeEach(async () => {
  //default to dev mode; individual tests override for production behavior
  isDevModeMock.mockReturnValue(true);
  //`config` is never reassigned, so declare it const instead of let
  const config = await getCachedConfiguration(true);
  config.apeKeys.acceptKeys = true;
  mockRequest = {
    baseUrl: "/api/v1",
    route: {
      path: "/",
    },
    headers: {
      authorization: "Bearer 123456789",
    },
    ctx: {
      configuration: config,
      decodedToken: {
        type: "None",
        uid: "",
        email: "",
      },
    },
  };
  mockResponse = {
    json: vi.fn(),
  };
  //next() rethrows errors so failed authentications surface as rejections
  nextFunction = vi.fn((error) => {
    if (error) {
      throw error;
    }
    return "Next function called";
  }) as unknown as NextFunction;
});
afterEach(() => {
isDevModeMock.mockClear();
});
describe("authenticateTsRestRequest", () => {
const prometheusRecordAuthTimeMock = vi.spyOn(Prometheus, "recordAuthTime");
const prometheusIncrementAuthMock = vi.spyOn(Prometheus, "incrementAuth");
const timingSafeEqualMock = vi.spyOn(crypto, "timingSafeEqual");
beforeEach(() => {
timingSafeEqualMock.mockClear().mockReturnValue(true);
[prometheusIncrementAuthMock, prometheusRecordAuthTimeMock].forEach(
(it) => it.mockClear(),
);
});
it("should fail if token is not fresh", async () => {
//GIVEN
Date.now = vi.fn(() => 60001);
const expectedError = new MonkeyError(
401,
"Unauthorized\nStack: This endpoint requires a fresh token",
);
//WHEN
await expect(() =>
authenticate({}, { requireFreshToken: true }),
).rejects.toMatchMonkeyError(expectedError);
//THEN
expect(nextFunction).toHaveBeenLastCalledWith(
expect.toMatchMonkeyError(expectedError),
);
expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
});
it("should allow the request if token is fresh", async () => {
//GIVEN
Date.now = vi.fn(() => 10000);
//WHEN
const result = await authenticate({}, { requireFreshToken: true });
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("Bearer");
expect(decodedToken?.email).toBe(mockDecodedToken.email);
expect(decodedToken?.uid).toBe(mockDecodedToken.uid);
expect(nextFunction).toHaveBeenCalledOnce();
expect(prometheusIncrementAuthMock).toHaveBeenCalledWith("Bearer");
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
});
it("should allow the request if apeKey is supported", async () => {
//WHEN
const result = await authenticate(
{ headers: { authorization: "ApeKey aWQua2V5" } },
{ acceptApeKeys: true },
);
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("ApeKey");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("123");
expect(nextFunction).toHaveBeenCalledTimes(1);
});
it("should fail with apeKey if apeKey is not supported", async () => {
//WHEN
await expect(() =>
authenticate(
{ headers: { authorization: "ApeKey aWQua2V5" } },
{ acceptApeKeys: false },
),
).rejects.toThrow("This endpoint does not accept ApeKeys");
//THEN
});
it("should fail with apeKey if apeKeys are disabled", async () => {
//GIVEN
//@ts-expect-error
mockRequest.ctx.configuration.apeKeys.acceptKeys = false;
//WHEN
await expect(() =>
authenticate(
{ headers: { authorization: "ApeKey aWQua2V5" } },
{ acceptApeKeys: false },
),
).rejects.toThrow("ApeKeys are not being accepted at this time");
//THEN
});
//fixed typo in test name: "authentation" -> "authentication"
it("should allow the request with authentication on public endpoint", async () => {
  //WHEN
  const result = await authenticate({}, { isPublic: true });
  //THEN
  const decodedToken = result.decodedToken;
  expect(decodedToken?.type).toBe("Bearer");
  expect(decodedToken?.email).toBe(mockDecodedToken.email);
  expect(decodedToken?.uid).toBe(mockDecodedToken.uid);
  expect(nextFunction).toHaveBeenCalledTimes(1);
});
it("should allow the request without authentication on public endpoint", async () => {
//WHEN
const result = await authenticate({ headers: {} }, { isPublic: true });
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("None");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("");
expect(nextFunction).toHaveBeenCalledTimes(1);
expect(prometheusIncrementAuthMock).toHaveBeenCalledWith("None");
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
});
it("should allow the request with apeKey on public endpoint", async () => {
//WHEN
const result = await authenticate(
{ headers: { authorization: "ApeKey aWQua2V5" } },
{ isPublic: true },
);
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("ApeKey");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("123");
expect(nextFunction).toHaveBeenCalledTimes(1);
expect(prometheusIncrementAuthMock).toHaveBeenCalledWith("ApeKey");
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
});
it("should allow request with Uid on dev", async () => {
//WHEN
const result = await authenticate({
headers: { authorization: "Uid 123" },
});
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("Bearer");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("123");
expect(nextFunction).toHaveBeenCalledTimes(1);
});
it("should allow request with Uid and email on dev", async () => {
const result = await authenticate({
headers: { authorization: "Uid 123|test@example.com" },
});
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("Bearer");
expect(decodedToken?.email).toBe("test@example.com");
expect(decodedToken?.uid).toBe("123");
expect(nextFunction).toHaveBeenCalledTimes(1);
});
it("should fail request with Uid on non-dev", async () => {
//GIVEN
isDevModeMock.mockReturnValue(false);
//WHEN / THEN
await expect(() =>
authenticate({ headers: { authorization: "Uid 123" } }),
).rejects.toMatchMonkeyError(
new MonkeyError(401, "Bearer type uid is not supported"),
);
});
it("should fail without authentication", async () => {
  await expect(() => authenticate({ headers: {} })).rejects.toThrow(
    "Unauthorized\nStack: endpoint: /api/v1 no authorization header found",
  );
  //THEN
  expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
  expect(prometheusRecordAuthTimeMock).toHaveBeenCalledWith(
    "None",
    "failure",
    expect.anything(),
    expect.anything(),
  );
});
it("should fail with empty authentication", async () => {
  await expect(() =>
    authenticate({ headers: { authorization: "" } }),
  ).rejects.toThrow(
    "Unauthorized\nStack: endpoint: /api/v1 no authorization header found",
  );
  //THEN
  expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
  expect(prometheusRecordAuthTimeMock).toHaveBeenCalledWith(
    "",
    "failure",
    expect.anything(),
    expect.anything(),
  );
});
it("should fail with missing authentication token", async () => {
  await expect(() =>
    authenticate({ headers: { authorization: "Bearer" } }),
  ).rejects.toThrow(
    "Missing authentication token\nStack: authenticateWithAuthHeader",
  );
  //THEN
  expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
  expect(prometheusRecordAuthTimeMock).toHaveBeenCalledWith(
    "Bearer",
    "failure",
    expect.anything(),
    expect.anything(),
  );
});
it("should fail with unknown authentication scheme", async () => {
  await expect(() =>
    authenticate({ headers: { authorization: "unknown format" } }),
  ).rejects.toThrow(
    'Unknown authentication scheme\nStack: The authentication scheme "unknown" is not implemented',
  );
  //THEN
  expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
  expect(prometheusRecordAuthTimeMock).toHaveBeenCalledWith(
    "unknown",
    "failure",
    expect.anything(),
    expect.anything(),
  );
});
it("should record country if provided", async () => {
const prometheusRecordRequestCountryMock = vi.spyOn(
Prometheus,
"recordRequestCountry",
);
await authenticate(
{ headers: { "cf-ipcountry": "gb" } },
{ isPublic: true },
);
//THEN
expect(prometheusRecordRequestCountryMock).toHaveBeenCalledWith(
"gb",
expect.anything(),
);
});
//fixed typo in test name: "authentation" -> "authentication"
it("should allow the request with authentication on dev public endpoint", async () => {
  //WHEN
  const result = await authenticate({}, { isPublicOnDev: true });
  //THEN
  const decodedToken = result.decodedToken;
  expect(decodedToken?.type).toBe("Bearer");
  expect(decodedToken?.email).toBe(mockDecodedToken.email);
  expect(decodedToken?.uid).toBe(mockDecodedToken.uid);
  expect(nextFunction).toHaveBeenCalledTimes(1);
});
it("should allow the request without authentication on dev public endpoint", async () => {
//WHEN
const result = await authenticate(
{ headers: {} },
{ isPublicOnDev: true },
);
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("None");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("");
expect(nextFunction).toHaveBeenCalledTimes(1);
expect(prometheusIncrementAuthMock).toHaveBeenCalledWith("None");
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
});
it("should allow the request with apeKey on dev public endpoint", async () => {
//WHEN
const result = await authenticate(
{ headers: { authorization: "ApeKey aWQua2V5" } },
{ acceptApeKeys: true, isPublicOnDev: true },
);
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("ApeKey");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("123");
expect(nextFunction).toHaveBeenCalledTimes(1);
expect(prometheusIncrementAuthMock).toHaveBeenCalledWith("ApeKey");
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
});
it("should allow with apeKey if apeKeys are disabled on dev public endpoint", async () => {
//GIVEN
//@ts-expect-error
mockRequest.ctx.configuration.apeKeys.acceptKeys = false;
//WHEN
const result = await authenticate(
{ headers: { authorization: "ApeKey aWQua2V5" } },
{ acceptApeKeys: true, isPublicOnDev: true },
);
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("ApeKey");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("123");
expect(nextFunction).toHaveBeenCalledTimes(1);
expect(prometheusIncrementAuthMock).toHaveBeenCalledWith("ApeKey");
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
});
//fixed typo in test name: "authentation" -> "authentication"
it("should allow the request with authentication on dev public endpoint in production", async () => {
  //WHEN
  isDevModeMock.mockReturnValue(false);
  const result = await authenticate({}, { isPublicOnDev: true });
  //THEN
  const decodedToken = result.decodedToken;
  expect(decodedToken?.type).toBe("Bearer");
  expect(decodedToken?.email).toBe(mockDecodedToken.email);
  expect(decodedToken?.uid).toBe(mockDecodedToken.uid);
  expect(nextFunction).toHaveBeenCalledTimes(1);
});
it("should fail without authentication on dev public endpoint in production", async () => {
//WHEN
isDevModeMock.mockReturnValue(false);
//THEN
await expect(() =>
authenticate({ headers: {} }, { isPublicOnDev: true }),
).rejects.toThrow("Unauthorized");
});
it("should allow with apeKey on dev public endpoint in production", async () => {
//WHEN
isDevModeMock.mockReturnValue(false);
const result = await authenticate(
{ headers: { authorization: "ApeKey aWQua2V5" } },
{ acceptApeKeys: true, isPublicOnDev: true },
);
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("ApeKey");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("123");
expect(nextFunction).toHaveBeenCalledTimes(1);
expect(prometheusIncrementAuthMock).toHaveBeenCalledWith("ApeKey");
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
});
it("should allow githubwebhook with header", async () => {
vi.stubEnv("GITHUB_WEBHOOK_SECRET", "GITHUB_WEBHOOK_SECRET");
//WHEN
const result = await authenticate(
{
headers: { "x-hub-signature-256": "the-signature" },
body: { action: "published", release: { id: 1 } },
},
{ isGithubWebhook: true },
);
//THEN
const decodedToken = result.decodedToken;
expect(decodedToken?.type).toBe("GithubWebhook");
expect(decodedToken?.email).toBe("");
expect(decodedToken?.uid).toBe("");
expect(nextFunction).toHaveBeenCalledTimes(1);
expect(prometheusIncrementAuthMock).toHaveBeenCalledWith("GithubWebhook");
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledOnce();
expect(timingSafeEqualMock).toHaveBeenCalledWith(
Buffer.from(
"sha256=ff0f3080539e9df19153f6b5b5780f66e558d61038e6cf5ecf4efdc7266a7751",
),
Buffer.from("the-signature"),
);
});
it("should fail githubwebhook with mismatched signature", async () => {
vi.stubEnv("GITHUB_WEBHOOK_SECRET", "GITHUB_WEBHOOK_SECRET");
timingSafeEqualMock.mockReturnValue(false);
await expect(() =>
authenticate(
{
headers: { "x-hub-signature-256": "the-signature" },
body: { action: "published", release: { id: 1 } },
},
{ isGithubWebhook: true },
),
).rejects.toThrow("Github webhook signature invalid");
//THEH
expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledWith(
"None",
"failure",
expect.anything(),
expect.anything(),
);
});
it("should fail without header when endpoint is using githubwebhook", async () => {
vi.stubEnv("GITHUB_WEBHOOK_SECRET", "GITHUB_WEBHOOK_SECRET");
await expect(() =>
authenticate(
{
headers: {},
body: { action: "published", release: { id: 1 } },
},
{ isGithubWebhook: true },
),
).rejects.toThrow("Missing Github signature header");
//THEH
expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledWith(
"None",
"failure",
expect.anything(),
expect.anything(),
);
});
it("should fail with missing GITHUB_WEBHOOK_SECRET when endpoint is using githubwebhook", async () => {
vi.stubEnv("GITHUB_WEBHOOK_SECRET", "");
await expect(() =>
authenticate(
{
headers: { "x-hub-signature-256": "the-signature" },
body: { action: "published", release: { id: 1 } },
},
{ isGithubWebhook: true },
),
).rejects.toThrow("Missing Github Webhook Secret");
//THEH
expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledWith(
"None",
"failure",
expect.anything(),
expect.anything(),
);
});
it("should throw 500 if something went wrong when validating the signature when endpoint is using githubwebhook", async () => {
vi.stubEnv("GITHUB_WEBHOOK_SECRET", "GITHUB_WEBHOOK_SECRET");
timingSafeEqualMock.mockImplementation(() => {
throw new Error("could not validate");
});
await expect(() =>
authenticate(
{
headers: { "x-hub-signature-256": "the-signature" },
body: { action: "published", release: { id: 1 } },
},
{ isGithubWebhook: true },
),
).rejects.toThrow(
"Failed to authenticate Github webhook: could not validate",
);
//THEH
expect(prometheusIncrementAuthMock).not.toHaveBeenCalled();
expect(prometheusRecordAuthTimeMock).toHaveBeenCalledWith(
"None",
"failure",
expect.anything(),
expect.anything(),
);
});
});
});
/**
 * Runs the ts-rest authentication middleware against a request assembled from
 * the shared mockRequest plus the given overrides, then returns the decoded
 * token the middleware attached to the request context.
 */
async function authenticate(
request: Partial<Request>,
authenticationOptions?: RequestAuthenticationOptions,
): Promise<{ decodedToken: Auth.DecodedToken }> {
const metadata = { authenticationOptions } as EndpointMetadata;
const req = {
...mockRequest,
...request,
tsRestRoute: { metadata },
} as any;
const middleware = Auth.authenticateTsRestRequest();
await middleware(req, mockResponse as Response, nextFunction);
return { decodedToken: req.ctx.decodedToken };
}

View File

@@ -0,0 +1,201 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { RequireConfiguration } from "@monkeytype/contracts/require-configuration/index";
import { verifyRequiredConfiguration } from "../../src/middlewares/configuration";
import { Configuration } from "@monkeytype/schemas/configuration";
import { Response } from "express";
import MonkeyError from "../../src/utils/error";
import { TsRestRequest } from "../../src/api/types";
import { enableMonkeyErrorExpects } from "../__testData__/monkey-error";
enableMonkeyErrorExpects();
describe("configuration middleware", () => {
const handler = verifyRequiredConfiguration();
const res: Response = {} as any;
const next = vi.fn();
beforeEach(() => {
next.mockClear();
});
afterEach(() => {
//next function must only be called once
expect(next).toHaveBeenCalledOnce();
});
it("should pass without requireConfiguration", async () => {
//GIVEN
const req = { tsRestRoute: { metadata: {} } } as any;
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
it("should pass for enabled configuration", async () => {
//GIVEN
const req = givenRequest({ path: "maintenance" }, { maintenance: true });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
it("should pass for enabled configuration with complex path", async () => {
//GIVEN
const req = givenRequest(
{ path: "users.xp.streak.enabled" },
{ users: { xp: { streak: { enabled: true } as any } as any } as any },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
it("should fail for disabled configuration", async () => {
//GIVEN
const req = givenRequest({ path: "maintenance" }, { maintenance: false });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(503, "This endpoint is currently unavailable."),
),
);
});
it("should fail for disabled configuration and custom message", async () => {
//GIVEN
const req = givenRequest(
{ path: "maintenance", invalidMessage: "Feature not enabled." },
{ maintenance: false },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(new MonkeyError(503, "Feature not enabled.")),
);
});
it("should fail for invalid path", async () => {
//GIVEN
const req = givenRequest({ path: "invalid.path" as any }, {});
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(500, 'Invalid configuration path: "invalid.path"'),
),
);
});
it("should fail for undefined value", async () => {
//GIVEN
const req = givenRequest(
{ path: "admin.endpointsEnabled" },
{ admin: {} as any },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(
500,
'Required configuration doesnt exist: "admin.endpointsEnabled"',
),
),
);
});
it("should fail for null value", async () => {
//GIVEN
const req = givenRequest(
{ path: "admin.endpointsEnabled" },
{ admin: { endpointsEnabled: null as any } },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(
500,
'Required configuration doesnt exist: "admin.endpointsEnabled"',
),
),
);
});
it("should fail for non booean value", async () => {
//GIVEN
const req = givenRequest(
{ path: "admin.endpointsEnabled" },
{ admin: { endpointsEnabled: "disabled" as any } },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(
500,
'Required configuration is not a boolean: "admin.endpointsEnabled"',
),
),
);
});
it("should pass for multiple configurations", async () => {
//GIVEN
const req = givenRequest(
[{ path: "maintenance" }, { path: "admin.endpointsEnabled" }],
{ maintenance: true, admin: { endpointsEnabled: true } },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
it("should fail for multiple configurations", async () => {
//GIVEN
const req = givenRequest(
[
{ path: "maintenance", invalidMessage: "maintenance mode" },
{ path: "admin.endpointsEnabled", invalidMessage: "admin disabled" },
],
{ maintenance: true, admin: { endpointsEnabled: false } },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(new MonkeyError(503, "admin disabled")),
);
});
});
/**
 * Builds a minimal TsRestRequest carrying the given requireConfiguration
 * metadata and a (partial) configuration on the request context, as the
 * middleware under test expects to find them.
 */
function givenRequest(
requireConfiguration: RequireConfiguration | RequireConfiguration[],
configuration: Partial<Configuration>,
): TsRestRequest {
return {
tsRestRoute: { metadata: { requireConfiguration } },
ctx: { configuration },
} as any;
}

View File

@@ -0,0 +1,338 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest";
import { Response } from "express";
import { verifyPermissions } from "../../src/middlewares/permission";
import { EndpointMetadata } from "@monkeytype/contracts/util/api";
import * as Misc from "../../src/utils/misc";
import * as AdminUids from "../../src/dal/admin-uids";
import * as UserDal from "../../src/dal/user";
import MonkeyError from "../../src/utils/error";
import { DecodedToken } from "../../src/middlewares/auth";
import { TsRestRequest } from "../../src/api/types";
import { enableMonkeyErrorExpects } from "../__testData__/monkey-error";
enableMonkeyErrorExpects();
const uid = "123456789";
describe("permission middleware", () => {
const handler = verifyPermissions();
const res: Response = {} as any;
const next = vi.fn();
const getPartialUserMock = vi.spyOn(UserDal, "getPartialUser");
const isAdminMock = vi.spyOn(AdminUids, "isAdmin");
const isDevMock = vi.spyOn(Misc, "isDevEnvironment");
beforeEach(() => {
next.mockClear();
getPartialUserMock.mockClear().mockResolvedValue({} as any);
isDevMock.mockClear().mockReturnValue(false);
isAdminMock.mockClear().mockResolvedValue(false);
});
afterEach(() => {
//next function must only be called once
expect(next).toHaveBeenCalledOnce();
});
it("should bypass without requiredPermission", async () => {
//GIVEN
const req = givenRequest({});
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
it("should bypass with empty requiredPermission", async () => {
//GIVEN
const req = givenRequest({ requirePermission: [] });
//WHEN
await handler(req, res, next);
//THE
expect(next).toHaveBeenCalledWith();
});
describe("admin check", () => {
const requireAdminPermission: EndpointMetadata = {
requirePermission: "admin",
};
it("should fail without authentication", async () => {
//GIVEN
const req = givenRequest(requireAdminPermission);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(403, "You don't have permission to do this."),
),
);
});
it("should pass without authentication if publicOnDev on dev", async () => {
//GIVEN
isDevMock.mockReturnValue(true);
const req = givenRequest(
{
...requireAdminPermission,
authenticationOptions: { isPublicOnDev: true },
},
{ uid },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
it("should fail without authentication if publicOnDev on prod ", async () => {
//GIVEN
const req = givenRequest(
{
...requireAdminPermission,
authenticationOptions: { isPublicOnDev: true },
},
{ uid },
);
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(403, "You don't have permission to do this."),
),
);
});
it("should fail without admin permissions", async () => {
//GIVEN
const req = givenRequest(requireAdminPermission, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(403, "You don't have permission to do this."),
),
);
expect(isAdminMock).toHaveBeenCalledWith(uid);
});
});
describe("user checks", () => {
it("should fetch user only once", async () => {
//GIVEN
const req = givenRequest(
{
requirePermission: ["canReport", "canManageApeKeys"],
},
{ uid },
);
//WHEN
await handler(req, res, next);
//THEN
expect(getPartialUserMock).toHaveBeenCalledOnce();
expect(getPartialUserMock).toHaveBeenCalledWith(
uid,
"check user permissions",
["canReport", "canManageApeKeys"],
);
});
it("should fail if authentication is missing", async () => {
//GIVEN
const req = givenRequest({
requirePermission: ["canReport", "canManageApeKeys"],
});
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(
403,
"Failed to check permissions, authentication required.",
),
),
);
});
});
describe("quoteMod check", () => {
const requireQuoteMod: EndpointMetadata = {
requirePermission: "quoteMod",
};
it("should pass for quoteAdmin", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({ quoteMod: true } as any);
const req = givenRequest(requireQuoteMod, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
expect(getPartialUserMock).toHaveBeenCalledWith(
uid,
"check user permissions",
["quoteMod"],
);
});
it("should pass for specific language", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({ quoteMod: "english" } as any);
const req = givenRequest(requireQuoteMod, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
expect(getPartialUserMock).toHaveBeenCalledWith(
uid,
"check user permissions",
["quoteMod"],
);
});
it("should fail for empty string", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({ quoteMod: "" } as any);
const req = givenRequest(requireQuoteMod, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(403, "You don't have permission to do this."),
),
);
});
it("should fail for missing quoteMod", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({} as any);
const req = givenRequest(requireQuoteMod, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(403, "You don't have permission to do this."),
),
);
});
});
describe("canReport check", () => {
const requireCanReport: EndpointMetadata = {
requirePermission: "canReport",
};
it("should fail if user cannot report", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({ canReport: false } as any);
const req = givenRequest(requireCanReport, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(403, "You don't have permission to do this."),
),
);
expect(getPartialUserMock).toHaveBeenCalledWith(
uid,
"check user permissions",
["canReport"],
);
});
it("should pass if user can report", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({ canReport: true } as any);
const req = givenRequest(requireCanReport, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
it("should pass if canReport is not set", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({} as any);
const req = givenRequest(requireCanReport, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
});
describe("canManageApeKeys check", () => {
const requireCanReport: EndpointMetadata = {
requirePermission: "canManageApeKeys",
};
it("should fail if user cannot report", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({ canManageApeKeys: false } as any);
const req = givenRequest(requireCanReport, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith(
expect.toMatchMonkeyError(
new MonkeyError(
403,
"You have lost access to ape keys, please contact support",
),
),
);
expect(getPartialUserMock).toHaveBeenCalledWith(
uid,
"check user permissions",
["canManageApeKeys"],
);
});
it("should pass if user can report", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({ canManageApeKeys: true } as any);
const req = givenRequest(requireCanReport, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
it("should pass if canManageApeKeys is not set", async () => {
//GIVEN
getPartialUserMock.mockResolvedValue({} as any);
const req = givenRequest(requireCanReport, { uid });
//WHEN
await handler(req, res, next);
//THEN
expect(next).toHaveBeenCalledWith();
});
});
});
/**
 * Builds a minimal TsRestRequest carrying the given endpoint metadata and an
 * optional decoded token on the request context, as the permission middleware
 * under test expects to find them.
 */
function givenRequest(
metadata: EndpointMetadata,
decodedToken?: Partial<DecodedToken>,
): TsRestRequest {
return { tsRestRoute: { metadata }, ctx: { decodedToken } } as any;
}

View File

@@ -0,0 +1,42 @@
import { vi } from "vitest";
/**
 * Registers module mocks shared by the backend test setups: a console-backed
 * logger, a pass-through swagger-stats middleware, and a firebase-admin stub
 * whose verifyIdToken always resolves a fixed decoded token.
 *
 * NOTE(review): vi.mock calls are usually hoisted to the top of the calling
 * test file; wrapping them in a helper relies on the relative paths resolving
 * against this file — confirm this behaves as intended with the project's
 * vitest configuration.
 */
export function setupCommonMocks() {
// Route all logger output to the console so tests stay side-effect free.
vi.mock("../src/utils/logger", () => ({
__esModule: true,
default: {
error: console.error,
warning: console.warn,
info: console.info,
success: console.info,
logToDb: console.info,
},
}));
// swagger-stats middleware becomes a no-op that just calls next().
vi.mock("swagger-stats", () => ({
getMiddleware:
() =>
(_: unknown, __: unknown, next: () => unknown): void => {
next();
},
}));
// TODO: better approach for this when needed
// https://firebase.google.com/docs/rules/unit-tests#run_local_unit_tests_with_the_version_9_javascript_sdk
vi.mock("firebase-admin", () => ({
__esModule: true,
default: {
auth: (): unknown => ({
verifyIdToken: (
_token: string,
_checkRevoked: boolean,
): unknown /* Promise<DecodedIdToken> */ =>
Promise.resolve({
aud: "mockFirebaseProjectId",
auth_time: 123,
exp: 1000,
uid: "mockUid",
}),
}),
},
}));
}

View File

@@ -0,0 +1,40 @@
import { afterAll, beforeAll, afterEach, vi } from "vitest";
import { BASE_CONFIGURATION } from "../src/constants/base-configuration";
import { setupCommonMocks } from "./setup-common-mocks";
import { __testing } from "../src/init/configuration";
process.env["MODE"] = "dev";
process.env.TZ = "UTC";
beforeAll(async () => {
//don't add any configuration here, add to global-setup.ts instead.
vi.mock("../src/init/configuration", async (importOriginal) => {
const orig = (await importOriginal()) as { __testing: typeof __testing };
return {
__testing: orig.__testing,
getLiveConfiguration: () => BASE_CONFIGURATION,
getCachedConfiguration: () => BASE_CONFIGURATION,
patchConfiguration: vi.fn(),
};
});
vi.mock("../src/init/db", () => ({
__esModule: true,
getDb: () => undefined,
collection: () => undefined,
close: () => {
//
},
}));
setupCommonMocks();
});
afterEach(async () => {
//nothing
});
afterAll(async () => {
vi.resetAllMocks();
});

View File

@@ -0,0 +1,12 @@
{
"extends": "@monkeytype/typescript-config/base.json",
"compilerOptions": {
"noEmit": true,
"noImplicitAny": false,
"strictFunctionTypes": false,
"useUnknownInCatchVariables": false,
"strictPropertyInitialization": false
},
"files": ["vitest.d.ts"],
"include": ["./**/*.ts", "./**/*.spec.ts", "./setup-tests.ts"]
}

View File

@@ -0,0 +1,498 @@
import { describe, it, expect, afterAll, vi } from "vitest";
import * as Misc from "../../src/utils/misc";
import { ObjectId } from "mongodb";
describe("Misc Utils", () => {
afterAll(() => {
vi.useRealTimers();
});
describe("matchesAPattern", () => {
const testCases = [
{
pattern: "eng.*",
cases: ["english", "aenglish", "en", "eng"],
expected: [true, false, false, true],
},
{
pattern: "\\d+",
cases: ["b", "2", "331", "1a"],
expected: [false, true, true, false],
},
{
pattern: "(hi|hello)",
cases: ["hello", "hi", "hillo", "hi hello"],
expected: [true, true, false, false],
},
{
pattern: ".+",
cases: ["a2", "b2", "c1", ""],
expected: [true, true, true, false],
},
];
it.each(testCases)(
"matchesAPattern with $pattern",
({ pattern, cases, expected }) => {
cases.forEach((caseValue, index) => {
expect(Misc.matchesAPattern(caseValue, pattern)).toBe(
expected[index],
);
});
},
);
});
describe("kogascore", () => {
const testCases = [
{
wpm: 214.8,
acc: 93.04,
timestamp: 1653586489000,
expectedScore: 1214800930423111,
},
{
wpm: 214.8,
acc: 93.04,
timestamp: 1653601763000,
expectedScore: 1214800930407837,
},
{
wpm: 199.37,
acc: 97.69,
timestamp: 1653588809000,
expectedScore: 1199370976920791,
},
{
wpm: 196.2,
acc: 96.07,
timestamp: 1653591901000,
expectedScore: 1196200960717699,
},
{
wpm: 196.205,
acc: 96.075,
timestamp: 1653591901000,
expectedScore: 1196210960817699,
},
{
// this one is particularly important - in JS 154.39 * 100 is equal to 15438.999999999998
// thanks floating point errors!
wpm: 154.39,
acc: 96.14,
timestamp: 1740333827000,
expectedScore: 1154390961421373,
},
];
it.each(testCases)(
"kogascore with wpm:$wpm, acc:$acc, timestamp:$timestamp = $expectedScore",
({ wpm, acc, timestamp, expectedScore }) => {
expect(Misc.kogascore(wpm, acc, timestamp)).toBe(expectedScore);
},
);
});
describe("identity", () => {
const testCases = [
{
input: "",
expected: "string",
},
{
input: {},
expected: "object",
},
{
input: 0,
expected: "number",
},
{
input: null,
expected: "null",
},
{
input: undefined,
expected: "undefined",
},
];
it.each(testCases)(
"identity with $input = $expected",
({ input, expected }) => {
expect(Misc.identity(input)).toBe(expected);
},
);
});
describe("flattenObjectDeep", () => {
const testCases = [
{
obj: {
a: {
b: {
c: 1,
},
},
d: 2,
e: [],
},
expected: {
"a.b.c": 1,
d: 2,
e: [],
},
},
{
obj: {
a: {
b: {
c: 1,
},
},
d: {
e: {
f: 2,
g: 3,
},
},
},
expected: {
"a.b.c": 1,
"d.e.f": 2,
"d.e.g": 3,
},
},
{
obj: {
a: {
b: {
c: 1,
d: {
e: 2,
f: 3,
g: {},
},
},
},
},
expected: {
"a.b.c": 1,
"a.b.d.e": 2,
"a.b.d.f": 3,
"a.b.d.g": {},
},
},
{
obj: {},
expected: {},
},
];
it.each(testCases)(
"flattenObjectDeep with $obj = $expected",
({ obj, expected }) => {
expect(Misc.flattenObjectDeep(obj)).toEqual(expected);
},
);
});
it("sanitizeString", () => {
const testCases = [
{
input: "h̶̼͔̭͈̏́̀́͋͜ͅe̵̺̞̦̫̫͔̋́̅̅̃̀͝͝ļ̶̬̯͚͇̺͍̞̫̟͖͋̓͛̆̒̓͜ĺ̴̗̘͇̬̆͂͌̈͊͝͝ỡ̴̡̦̩̠̞̐̃͆̚͠͝",
expected: "hello",
},
{
input: "hello",
expected: "hello",
},
{
input: "hel lo",
expected: "hel lo",
},
{
input: " hel lo ",
expected: "hel lo",
},
{
input: "",
expected: "",
},
{
input: " \n\n\n",
expected: "",
},
{
input: undefined,
expected: undefined,
},
];
testCases.forEach(({ input, expected }) => {
expect(Misc.sanitizeString(input)).toEqual(expected);
});
});
it("getOrdinalNumberString", () => {
const testCases = [
{
input: 0,
output: "0th",
},
{
input: 1,
output: "1st",
},
{
input: 2,
output: "2nd",
},
{
input: 3,
output: "3rd",
},
{
input: 4,
output: "4th",
},
{
input: 10,
output: "10th",
},
{
input: 11,
output: "11th",
},
{
input: 12,
output: "12th",
},
{
input: 13,
output: "13th",
},
{
input: 100,
output: "100th",
},
{
input: 101,
output: "101st",
},
{
input: 102,
output: "102nd",
},
{
input: 103,
output: "103rd",
},
{
input: 104,
output: "104th",
},
{
input: 93589423,
output: "93589423rd",
},
];
testCases.forEach(({ input, output }) => {
expect(Misc.getOrdinalNumberString(input)).toEqual(output);
});
});
it("formatSeconds", () => {
const testCases = [
{
seconds: 5,
expected: "5 seconds",
},
{
seconds: 65,
expected: "1.08 minutes",
},
{
seconds: Misc.HOUR_IN_SECONDS,
expected: "1 hour",
},
{
seconds: Misc.DAY_IN_SECONDS,
expected: "1 day",
},
{
seconds: Misc.WEEK_IN_SECONDS,
expected: "1 week",
},
{
seconds: Misc.YEAR_IN_SECONDS,
expected: "1 year",
},
{
seconds: 2 * Misc.YEAR_IN_SECONDS,
expected: "2 years",
},
{
seconds: 4 * Misc.YEAR_IN_SECONDS,
expected: "4 years",
},
{
seconds: 3 * Misc.WEEK_IN_SECONDS,
expected: "3 weeks",
},
{
seconds: Misc.MONTH_IN_SECONDS * 4,
expected: "4 months",
},
{
seconds: Misc.MONTH_IN_SECONDS * 11,
expected: "11 months",
},
];
testCases.forEach(({ seconds, expected }) => {
expect(Misc.formatSeconds(seconds)).toBe(expected);
});
});
describe("replaceObjectId", () => {
it("replaces objecId with string", () => {
const fromDatabase = {
_id: new ObjectId(),
test: "test",
number: 1,
};
expect(Misc.replaceObjectId(fromDatabase)).toStrictEqual({
_id: fromDatabase._id.toHexString(),
test: "test",
number: 1,
});
});
it("ignores null values", () => {
expect(Misc.replaceObjectId(null)).toBeNull();
});
});
describe("replaceObjectIds", () => {
it("replaces objecIds with string", () => {
const fromDatabase = {
_id: new ObjectId(),
test: "test",
number: 1,
};
const fromDatabase2 = {
_id: new ObjectId(),
test: "bob",
number: 2,
};
expect(
Misc.replaceObjectIds([fromDatabase, fromDatabase2]),
).toStrictEqual([
{
_id: fromDatabase._id.toHexString(),
test: "test",
number: 1,
},
{
_id: fromDatabase2._id.toHexString(),
test: "bob",
number: 2,
},
]);
});
it("handles undefined", () => {
expect(Misc.replaceObjectIds(undefined as any)).toBeUndefined();
});
});
describe("omit()", () => {
it("should omit a single key", () => {
const input = { a: 1, b: 2, c: 3 };
const result = Misc.omit(input, ["b"]);
expect(result).toEqual({ a: 1, c: 3 });
});
it("should omit multiple keys", () => {
const input = { a: 1, b: 2, c: 3, d: 4 };
const result = Misc.omit(input, ["a", "d"]);
expect(result).toEqual({ b: 2, c: 3 });
});
it("should return the same object if no keys are omitted", () => {
const input = { x: 1, y: 2 };
const result = Misc.omit(input, []);
expect(result).toEqual({ x: 1, y: 2 });
});
it("should not mutate the original object", () => {
const input = { foo: "bar", baz: "qux" };
const copy = { ...input };
Misc.omit(input, ["baz"]);
expect(input).toEqual(copy);
});
it("should ignore keys that do not exist", () => {
const input = { a: 1, b: 2 };
const result = Misc.omit(input, "c" as any); // allow a non-existing key
expect(result).toEqual({ a: 1, b: 2 });
});
it("should work with different value types", () => {
const input = {
str: "hello",
num: 123,
bool: true,
obj: { x: 1 },
arr: [1, 2, 3],
};
const result = Misc.omit(input, ["bool", "arr"]);
expect(result).toEqual({
str: "hello",
num: 123,
obj: { x: 1 },
});
});
});
describe("isPlainObject", () => {
it("should return true for plain objects", () => {
expect(Misc.isPlainObject({})).toBe(true);
expect(Misc.isPlainObject({ a: 1, b: 2 })).toBe(true);
expect(Misc.isPlainObject(Object.create(Object.prototype))).toBe(true);
});
it("should return false for arrays", () => {
expect(Misc.isPlainObject([])).toBe(false);
expect(Misc.isPlainObject([1, 2, 3])).toBe(false);
});
it("should return false for null", () => {
expect(Misc.isPlainObject(null)).toBe(false);
});
it("should return false for primitives", () => {
expect(Misc.isPlainObject(123)).toBe(false);
expect(Misc.isPlainObject("string")).toBe(false);
expect(Misc.isPlainObject(true)).toBe(false);
expect(Misc.isPlainObject(undefined)).toBe(false);
expect(Misc.isPlainObject(Symbol("sym"))).toBe(false);
});
it("should return false for objects with different prototypes", () => {
// oxlint-disable-next-line no-extraneous-class
class MyClass {}
expect(Misc.isPlainObject(new MyClass())).toBe(false);
expect(Misc.isPlainObject(Object.create(null))).toBe(false);
expect(Misc.isPlainObject(new Date())).toBe(false);
expect(Misc.isPlainObject(new Map())).toBe(false);
expect(Misc.isPlainObject(new Set())).toBe(false);
});
it("should return false for functions", () => {
// oxlint-disable-next-line no-empty-function
expect(Misc.isPlainObject(function () {})).toBe(false);
// oxlint-disable-next-line no-empty-function
expect(Misc.isPlainObject(() => {})).toBe(false);
});
});
});

View File

@@ -0,0 +1,21 @@
import { describe, it, expect } from "vitest";
import { buildMonkeyMail } from "../../src/utils/monkey-mail";
describe("Monkey Mail", () => {
it("should properly create a mail object", () => {
const mailConfig = {
subject: "",
body: "",
timestamp: Date.now(),
};
const mail = buildMonkeyMail(mailConfig) as any;
expect(mail.id).toBeDefined();
expect(mail.subject).toBe("");
expect(mail.body).toBe("");
expect(mail.timestamp).toBeDefined();
expect(mail.read).toBe(false);
expect(mail.rewards).toEqual([]);
});
});

View File

@@ -0,0 +1,213 @@
import { describe, it, expect } from "vitest";
import * as pb from "../../src/utils/pb";
import { Mode, PersonalBests } from "@monkeytype/schemas/shared";
import { Result } from "@monkeytype/schemas/results";
import { FunboxName } from "@monkeytype/schemas/configs";
describe("Pb Utils", () => {
describe("funboxCatGetPb", () => {
const testCases: { funbox: FunboxName[] | undefined; expected: boolean }[] =
[
{
funbox: ["plus_one"],
expected: true,
},
{
funbox: [],
expected: true,
},
{
funbox: undefined,
expected: true,
},
{
funbox: ["nausea", "plus_one"],
expected: true,
},
{
funbox: ["arrows"],
expected: false,
},
];
it.each(testCases)(
"canFunboxGetPb with $funbox = $expected",
({ funbox, expected }) => {
const result = pb.canFunboxGetPb({ funbox } as any);
expect(result).toBe(expected);
},
);
});
describe("checkAndUpdatePb", () => {
it("should update personal best", () => {
const userPbs: PersonalBests = {
time: {},
words: {},
custom: {},
quote: {},
zen: {},
};
const result = {
difficulty: "normal",
language: "english",
punctuation: false,
lazyMode: false,
acc: 100,
consistency: 100,
rawWpm: 100,
wpm: 110,
numbers: false,
mode: "time",
mode2: "15",
} as unknown as Result<Mode>;
const run = pb.checkAndUpdatePb(
userPbs,
{} as pb.LbPersonalBests,
result,
);
expect(run.isPb).toBe(true);
expect(run.personalBests.time?.["15"]?.[0]).not.toBe(undefined);
expect(run.lbPersonalBests).not.toBe({});
});
it("should not override default pb when saving numbers test", () => {
const userPbs: PersonalBests = {
time: {
"15": [
{
acc: 100,
consistency: 100,
difficulty: "normal",
lazyMode: false,
language: "english",
numbers: false,
punctuation: false,
raw: 100,
timestamp: 0,
wpm: 100,
},
],
},
words: {},
custom: {},
quote: {},
zen: {},
};
const result = {
difficulty: "normal",
language: "english",
punctuation: false,
lazyMode: false,
acc: 100,
consistency: 100,
rawWpm: 100,
wpm: 110,
numbers: true,
mode: "time",
mode2: "15",
} as unknown as Result<Mode>;
const run = pb.checkAndUpdatePb(userPbs, undefined, result);
expect(run.isPb).toBe(true);
expect(run.personalBests.time?.["15"]).toEqual(
expect.arrayContaining([
expect.objectContaining({ numbers: false, wpm: 100 }),
expect.objectContaining({ numbers: true, wpm: 110 }),
]),
);
});
});
describe("updateLeaderboardPersonalBests", () => {
const userPbs: PersonalBests = {
time: {
"15": [
{
acc: 100,
consistency: 100,
difficulty: "normal",
lazyMode: false,
language: "english",
numbers: false,
punctuation: false,
raw: 100,
timestamp: 0,
wpm: 100,
},
{
acc: 100,
consistency: 100,
difficulty: "normal",
lazyMode: false,
language: "spanish",
numbers: false,
punctuation: false,
raw: 100,
timestamp: 0,
wpm: 100,
},
],
},
words: {},
custom: {},
quote: {},
zen: {},
};
it("should update leaderboard personal bests if they dont exist or the structure is incomplete", () => {
const lbpbstartingvalues = [
undefined,
{},
{ time: {} },
{ time: { "15": {} } },
{ time: { "15": { english: {} } } },
];
const result15 = {
mode: "time",
mode2: "15",
} as unknown as Result<Mode>;
for (const lbPb of lbpbstartingvalues) {
const lbPbPb = pb.updateLeaderboardPersonalBests(
userPbs,
structuredClone(lbPb) as pb.LbPersonalBests,
result15,
);
expect(lbPbPb).toEqual({
time: {
"15": {
english: {
acc: 100,
consistency: 100,
difficulty: "normal",
lazyMode: false,
language: "english",
numbers: false,
punctuation: false,
raw: 100,
timestamp: 0,
wpm: 100,
},
spanish: {
acc: 100,
consistency: 100,
difficulty: "normal",
lazyMode: false,
language: "spanish",
numbers: false,
punctuation: false,
raw: 100,
timestamp: 0,
wpm: 100,
},
},
},
});
}
});
});
});

View File

@@ -0,0 +1,220 @@
import { describe, it, expect } from "vitest";
import { replaceLegacyValues, DBResult } from "../../src/utils/result";
describe("Result Utils", () => {
describe("replaceLegacyValues", () => {
describe("legacy charStats conversion", () => {
it.each([
{
description:
"should convert correctChars and incorrectChars to charStats",
correctChars: 95,
incorrectChars: 5,
expectedCharStats: [95, 5, 0, 0],
},
{
description: "should handle zero values for legacy chars",
correctChars: 0,
incorrectChars: 0,
expectedCharStats: [0, 0, 0, 0],
},
{
description: "should handle large values for legacy chars",
correctChars: 9999,
incorrectChars: 1234,
expectedCharStats: [9999, 1234, 0, 0],
},
])(
"$description",
({ correctChars, incorrectChars, expectedCharStats }) => {
const resultWithLegacyChars: DBResult = {
correctChars,
incorrectChars,
} as any;
const result = replaceLegacyValues(resultWithLegacyChars);
expect(result.charStats).toEqual(expectedCharStats);
expect(result.correctChars).toBeUndefined();
expect(result.incorrectChars).toBeUndefined();
},
);
it("should prioritise charStats when legacy data exists", () => {
const resultWithBothFormats: DBResult = {
charStats: [80, 4, 2, 1],
correctChars: 95,
incorrectChars: 5,
} as any;
const result = replaceLegacyValues(resultWithBothFormats);
// Should convert legacy values and overwrite existing charStats
expect(result.charStats).toEqual([80, 4, 2, 1]);
// Legacy values should be removed after conversion
expect(result.correctChars).toBeUndefined();
expect(result.incorrectChars).toBeUndefined();
});
it.each([
{
description:
"should not convert when only one legacy property is present",
input: { correctChars: 95 },
expectedCharStats: undefined,
expectedCorrectChars: 95,
expectedIncorrectChars: undefined,
},
{
description: "should not convert when only incorrectChars is present",
input: { incorrectChars: 5 },
expectedCharStats: undefined,
expectedCorrectChars: undefined,
expectedIncorrectChars: 5,
},
])(
"$description",
({
input,
expectedCharStats,
expectedCorrectChars,
expectedIncorrectChars,
}) => {
const result = replaceLegacyValues(input as any);
// Should not convert since both properties are required
expect(result.charStats).toBe(expectedCharStats);
expect(result.correctChars).toBe(expectedCorrectChars);
expect(result.incorrectChars).toBe(expectedIncorrectChars);
},
);
});
describe("legacy funbox conversion", () => {
it.each([
{
description: "should convert string funbox to array",
input: "memory#mirror",
expected: ["memory", "mirror"],
},
{
description: "should convert single funbox string to array",
input: "memory",
expected: ["memory"],
},
{
description: "should convert 'none' funbox to empty array",
input: "none",
expected: [],
},
{
description: "should handle complex funbox combinations",
input: "memory#mirror#arrows#58008",
expected: ["memory", "mirror", "arrows", "58008"],
},
])("$description", ({ input, expected }) => {
const resultWithStringFunbox: DBResult = {
funbox: input as any,
} as any;
const result = replaceLegacyValues(resultWithStringFunbox);
expect(result.funbox).toEqual(expected);
});
});
describe("legacy chartData conversion", () => {
it("should convert chartData with 'raw' property to 'burst'", () => {
const resultWithLegacyChartData: DBResult = {
chartData: {
wpm: [50, 55, 60],
raw: [52, 57, 62],
err: [1, 0, 2],
} as any,
} as any;
const result = replaceLegacyValues(resultWithLegacyChartData);
expect(result.chartData).toEqual({
wpm: [50, 55, 60],
burst: [52, 57, 62],
err: [1, 0, 2],
});
});
it("should not convert chartData when it's 'toolong'", () => {
const resultWithToolongChartData: DBResult = {
chartData: "toolong",
} as any;
const result = replaceLegacyValues(resultWithToolongChartData);
expect(result.chartData).toBe("toolong");
});
it("should not convert chartData when it doesn't have 'raw' property", () => {
const resultWithModernChartData: DBResult = {
chartData: {
wpm: [50, 55, 60],
burst: [52, 57, 62],
err: [1, 0, 2],
},
} as any;
const result = replaceLegacyValues(resultWithModernChartData);
expect(result.chartData).toEqual({
wpm: [50, 55, 60],
burst: [52, 57, 62],
err: [1, 0, 2],
});
});
it("should not convert chartData when it's undefined", () => {
const resultWithoutChartData: DBResult = {} as any;
const result = replaceLegacyValues(resultWithoutChartData);
expect(result.chartData).toBeUndefined();
});
});
it("should convert all legacy data at once", () => {
const resultWithAllLegacy: DBResult = {
correctChars: 100,
incorrectChars: 8,
funbox: "memory#mirror" as any,
chartData: {
wpm: [50, 55, 60],
raw: [52, 57, 62],
err: [1, 0, 2],
} as any,
} as any;
const result = replaceLegacyValues(resultWithAllLegacy);
expect(result.charStats).toEqual([100, 8, 0, 0]);
expect(result.correctChars).toBeUndefined();
expect(result.incorrectChars).toBeUndefined();
expect(result.funbox).toEqual(["memory", "mirror"]);
expect(result.chartData).toEqual({
wpm: [50, 55, 60],
burst: [52, 57, 62],
err: [1, 0, 2],
});
});
describe("no legacy values", () => {
it("should return result unchanged when no legacy values present", () => {
const modernResult: DBResult = {
charStats: [95, 5, 2, 1],
funbox: ["memory"],
} as any;
const result = replaceLegacyValues(modernResult);
expect(result).toEqual(modernResult);
});
});
});
});

View File

@@ -0,0 +1,55 @@
import { describe, it, expect } from "vitest";
import * as Validation from "../../src/utils/validation";
describe("Validation", () => {
it("isTestTooShort", () => {
const testCases = [
{
result: {
mode: "time",
mode2: 10,
customText: undefined,
testDuration: 10,
bailedOut: false,
},
expected: true,
},
{
result: {
mode: "time",
mode2: 15,
customText: undefined,
testDuration: 15,
bailedOut: false,
},
expected: false,
},
{
result: {
mode: "time",
mode2: 0,
customText: undefined,
testDuration: 20,
bailedOut: false,
},
expected: false,
},
{
result: {
mode: "time",
mode2: 0,
customText: undefined,
testDuration: 2,
bailedOut: false,
},
expected: true,
},
];
testCases.forEach((testCase) => {
expect(Validation.isTestTooShort(testCase.result as any)).toBe(
testCase.expected,
);
});
});
});

32
backend/__tests__/vitest.d.ts vendored Normal file
View File

@@ -0,0 +1,32 @@
import type { Assertion, AsymmetricMatchersContaining } from "vitest";
import type { Test as SuperTest } from "supertest";
// NOTE(review): MonkeyError appears unreferenced in this file — kept to avoid
// breaking anything that relies on the module being loaded; confirm and drop.
import MonkeyError from "../src/utils/error";

/** Expected shape for the rate-limit matcher. */
type ExpectedRateLimit = {
  /** max calls */
  max: number;
  /** window in milliseconds. Needs to be within 2500ms */
  windowMs: number;
};

// Fix: the generic default previously referenced `Supertest`, a name that is
// never declared — the import aliases supertest's `Test` as `SuperTest`.
/** Custom matchers available on supertest request assertions. */
interface RestRequestMatcher<R = SuperTest> {
  /** Asserts the endpoint enforces the given rate limit. */
  toBeRateLimited: (expected: ExpectedRateLimit) => RestRequestMatcher<R>;
}

/** Custom matcher for asserting thrown MonkeyError status/message pairs. */
interface ThrowMatcher {
  toMatchMonkeyError: (expected: {
    status: number;
    message: string;
  }) => MatcherResult;
}

// Merge the custom matchers into vitest's assertion interfaces so they are
// available on expect(...) and asymmetric matchers alike.
declare module "vitest" {
  interface Assertion<T = any> extends RestRequestMatcher<T>, ThrowMatcher {}
  interface AsymmetricMatchersContaining
    extends RestRequestMatcher,
      ThrowMatcher {}
}

/** Standard vitest custom-matcher result shape. */
interface MatcherResult {
  pass: boolean;
  message: () => string;
  actual?: unknown;
  expected?: unknown;
}

View File

@@ -0,0 +1,33 @@
import { describe, it, expect } from "vitest";
import * as LaterWorker from "../../src/workers/later-worker";
const calculateXpReward = LaterWorker.__testing.calculateXpReward;
describe("later-worker", () => {
describe("calculateXpReward", () => {
it("should return the correct XP reward for a given rank", () => {
//GIVEN
const xpRewardBrackets = [
{ minRank: 1, maxRank: 1, minReward: 100, maxReward: 100 },
{ minRank: 2, maxRank: 10, minReward: 50, maxReward: 90 },
];
//WHEN / THEN
expect(calculateXpReward(xpRewardBrackets, 5)).toBe(75);
expect(calculateXpReward(xpRewardBrackets, 11)).toBeUndefined();
});
it("should return the highest XP reward if brackets overlap", () => {
//GIVEN
const xpRewardBrackets = [
{ minRank: 1, maxRank: 5, minReward: 900, maxReward: 1000 },
{ minRank: 2, maxRank: 20, minReward: 50, maxReward: 90 },
];
//WHEN
const reward = calculateXpReward(xpRewardBrackets, 5);
//THEN
expect(reward).toBe(900);
});
});
});

View File

@@ -0,0 +1,24 @@
# Compose stack that runs only the backing services (redis + mongodb),
# for running the API server directly on the host during development.
# Note: the top-level `version` attribute was removed — it is obsolete in the
# Compose Specification and only produces a warning in Compose v2.
name: monkeytype

services:
  redis:
    container_name: monkeytype-redis
    image: redis:6.2.6
    restart: on-failure
    ports:
      # Host port configurable via DOCKER_REDIS_PORT (defaults to 6379).
      - "${DOCKER_REDIS_PORT:-6379}:6379"
    volumes:
      - redis-data:/data

  mongodb:
    container_name: monkeytype-mongodb
    image: mongo:5.0.13
    restart: on-failure
    ports:
      # Host port configurable via DOCKER_DB_PORT (defaults to 27017).
      - "${DOCKER_DB_PORT:-27017}:27017"
    volumes:
      - mongo-data:/data/db

# Named volumes so data survives container recreation.
volumes:
  mongo-data:
  redis-data:

View File

@@ -0,0 +1,49 @@
# Full dev stack: redis + mongodb + the API server running from the mounted
# repository. The top-level `version` attribute was removed — it is obsolete
# in the Compose Specification and only produces a warning in Compose v2.
name: monkeytype

services:
  redis:
    container_name: monkeytype-redis
    image: redis:6.2.6
    restart: on-failure
    ports:
      - "${DOCKER_REDIS_PORT:-6379}:6379"
    volumes:
      - redis-data:/data

  mongodb:
    container_name: monkeytype-mongodb
    image: mongo:5.0.13
    restart: on-failure
    ports:
      - "${DOCKER_DB_PORT:-27017}:27017"
    volumes:
      - mongo-data:/data/db

  api-server:
    container_name: monkeytype-api-server
    build:
      # Inline image: node + pnpm, with a pnpm store writable by uid 1000.
      dockerfile_inline: |
        FROM node:24.11.0
        RUN npm i -g pnpm@10.28.1
        RUN mkdir /pnpm-store && chown -R 1000:1000 /pnpm-store
    user: "node" ##this works as long as your local user has uid=1000
    restart: on-failure
    depends_on:
      - redis
      - mongodb
    environment:
      # Service hostnames resolve via the compose network.
      - DB_URI=mongodb://mongodb:27017
      - REDIS_URI=redis://redis:6379
    ports:
      - "${DOCKER_SERVER_PORT:-5005}:5005"
    volumes:
      # Mount the whole repo so the dev server picks up local changes.
      - ../../:/monkeytype
    # YAML folds these quoted lines into one command; the trailing backslashes
    # are kept purely for readability (a missing one was added for consistency).
    entrypoint: 'bash -c "echo starting, this may take a while... \
      && cd /monkeytype \
      && pnpm config set store-dir /pnpm-store \
      && pnpm i \
      && npm run dev-be"'

volumes:
  mongo-data:
  redis-data:

View File

@@ -0,0 +1,137 @@
<mjml>
<mj-head>
<mj-style>
@import
"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.4/css/all.min.css";
.btn table{ width: 100%; } .btn a{ width: 100%; padding: 10px 0
!important;}
</mj-style>
</mj-head>
<mj-body background-color="#323437">
<mj-wrapper padding="20px 20px 200px 20px">
<mj-section padding="0px" padding-bottom="20px">
<mj-column width="600px">
<mj-image
width="200px"
src="https://github.com/monkeytypegame/monkeytype/blob/master/frontend/static/images/mtfulllogo.png?raw=true"
href="monkeytype.com"
align="left"
></mj-image>
</mj-column>
</mj-section>
<mj-section padding="0px">
<mj-column background-color="#2c2e31" border-radius="8px">
<mj-spacer></mj-spacer>
<mj-text color="#d1d0c5" font-size="20px" font-family="sans-serif">
Hey, {{name}}
</mj-text>
<mj-text
color="#d1d0c5"
font-size="16px"
line-height="24px"
font-family="sans-serif"
>
Nobody likes being locked out of their account. We're coming to your
rescue - just click the button below to get started. If you didn't
request a password reset, you can safely ignore this email.
</mj-text>
<mj-button
align="left"
background-color="#e2b714"
color="#323437"
font-size="16px"
line-height="32px"
css-class="btn"
href="{{passwordResetLink}}"
font-family="sans-serif"
>
Reset your password
</mj-button>
<mj-text
color="#d1d0c5"
font-size="16px"
line-height="24px"
padding-bottom="0px"
font-family="sans-serif"
>
Cheers,
</mj-text>
<mj-text
color="#d1d0c5"
font-size="16px"
line-height="24px"
padding-top="0px"
font-family="sans-serif"
>
Monkeytype Team
</mj-text>
<mj-divider border-color="#323437"></mj-divider>
<mj-text
color="#646669"
font-size="12px"
padding-bottom="0px"
font-family="sans-serif"
>
Alternatively, you can copy and paste the link below into your
browser:
</mj-text>
<mj-text color="#646669" font-size="12px" font-family="sans-serif">
{{passwordResetLink}}
</mj-text>
<mj-spacer></mj-spacer>
</mj-column>
</mj-section>
<mj-section padding-bottom="6px" padding-top="20px">
<mj-column width="50px">
<mj-button
font-size="20px"
padding="10px"
inner-padding="0"
color="#d1d0c5"
background-color="#323437"
href="https://github.com/monkeytypegame/monkeytype"
>
<i class="fab fa-fw fa-github"></i>
</mj-button>
</mj-column>
<mj-column width="50px">
<mj-button
font-size="20px"
padding="10px"
inner-padding="0"
color="#d1d0c5"
background-color="#323437"
href="https://x.com/monkeytype"
>
<i class="fab fa-fw fa-twitter"></i>
</mj-button>
</mj-column>
<mj-column width="50px">
<mj-button
font-size="20px"
padding="10px"
inner-padding="0"
color="#d1d0c5"
background-color="#323437"
href="https://discord.com/invite/monkeytype"
>
<i class="fab fa-fw fa-discord"></i>
</mj-button>
</mj-column>
</mj-section>
<mj-section padding-top="0">
<mj-column>
<mj-text align="center" color="#646669" background-color="#323437">
monkeytype.com
</mj-text>
</mj-column>
</mj-section>
</mj-wrapper>
</mj-body>
</mjml>

View File

@@ -0,0 +1,137 @@
<mjml>
<mj-head>
<mj-style>
@import
"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.4/css/all.min.css";
.btn table{ width: 100%; } .btn a{ width: 100%; padding: 10px 0
!important;}
</mj-style>
</mj-head>
<mj-body background-color="#323437">
<mj-wrapper padding="20px 20px 200px 20px">
<mj-section padding="0px" padding-bottom="20px">
<mj-column width="600px">
<mj-image
width="200px"
src="https://github.com/monkeytypegame/monkeytype/blob/master/frontend/static/images/mtfulllogo.png?raw=true"
href="monkeytype.com"
align="left"
></mj-image>
</mj-column>
</mj-section>
<mj-section padding="0px">
<mj-column background-color="#2c2e31" border-radius="8px">
<mj-spacer></mj-spacer>
<mj-text color="#d1d0c5" font-size="20px" font-family="sans-serif">
Hey, {{name}}
</mj-text>
<mj-text
color="#d1d0c5"
font-size="16px"
line-height="24px"
font-family="sans-serif"
>
Thanks for joining Monkeytype! We just need one more thing from you
- a quick confirmation of your email address and you'll be all set.
Click the button below to get started:
</mj-text>
<mj-button
align="left"
background-color="#e2b714"
color="#323437"
font-size="16px"
line-height="32px"
css-class="btn"
href="{{verificationLink}}"
font-family="sans-serif"
>
Verify
</mj-button>
<mj-text
color="#d1d0c5"
font-size="16px"
line-height="24px"
padding-bottom="0px"
font-family="sans-serif"
>
Cheers,
</mj-text>
<mj-text
color="#d1d0c5"
font-size="16px"
line-height="24px"
padding-top="0px"
font-family="sans-serif"
>
Monkeytype Team
</mj-text>
<mj-divider border-color="#323437"></mj-divider>
<mj-text
color="#646669"
font-size="12px"
padding-bottom="0px"
font-family="sans-serif"
>
Alternatively, you can copy and paste the link below into your
browser:
</mj-text>
<mj-text color="#646669" font-size="12px" font-family="sans-serif">
{{verificationLink}}
</mj-text>
<mj-spacer></mj-spacer>
</mj-column>
</mj-section>
<mj-section padding-bottom="6px" padding-top="20px">
<mj-column width="50px">
<mj-button
font-size="20px"
padding="10px"
inner-padding="0"
color="#d1d0c5"
background-color="#323437"
href="https://github.com/monkeytypegame/monkeytype"
>
<i class="fab fa-fw fa-github"></i>
</mj-button>
</mj-column>
<mj-column width="50px">
<mj-button
font-size="20px"
padding="10px"
inner-padding="0"
color="#d1d0c5"
background-color="#323437"
href="https://x.com/monkeytype"
>
<i class="fab fa-fw fa-twitter"></i>
</mj-button>
</mj-column>
<mj-column width="50px">
<mj-button
font-size="20px"
padding="10px"
inner-padding="0"
color="#d1d0c5"
background-color="#323437"
href="https://discord.com/invite/monkeytype"
>
<i class="fab fa-fw fa-discord"></i>
</mj-button>
</mj-column>
</mj-section>
<mj-section padding-top="0">
<mj-column>
<mj-text align="center" color="#646669" background-color="#323437">
monkeytype.com
</mj-text>
</mj-column>
</mj-section>
</mj-wrapper>
</mj-body>
</mjml>

25
backend/example.env Normal file
View File

@@ -0,0 +1,25 @@
# Backend environment template — copy to `.env` and adjust as needed.
DB_NAME=monkeytype
# Host ports published by the docker compose files.
DOCKER_SERVER_PORT=5005
DOCKER_DB_PORT=27017
DB_URI=mongodb://localhost:27017
DOCKER_REDIS_PORT=6379
REDIS_URI=redis://localhost:6379
LOG_FOLDER_PATH=./logs/
# Default log file max size is 10 MB
LOG_FILE_MAX_SIZE=10485760
MODE=dev
# NOTE(review): this looks like Google's public reCAPTCHA *test* secret —
# fine for development, must be replaced for production; confirm.
RECAPTCHA_SECRET=6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe
# You can also use the format mongodb://username:password@host:port or
# uncomment the following lines if you want to define them separately
# DB_USERNAME=
# DB_PASSWORD=
# DB_AUTH_MECHANISM="SCRAM-SHA-256"
# DB_AUTH_SOURCE=admin
# You can get a testing email address over at
# https://ethereal.email/create
#
# EMAIL_PORT=587
# EMAIL_HOST=smtp.ethereal.email
# EMAIL_USER=
# EMAIL_PASS=

96
backend/package.json Normal file
View File

@@ -0,0 +1,96 @@
{
"name": "@monkeytype/backend",
"version": "1.14.3",
"private": true,
"license": "GPL-3.0",
"scripts": {
"lint": "oxlint . --type-aware --type-check",
"lint-fast": "oxlint .",
"build": "npm run gen-docs && tsc --build",
"watch": "tsc --build --watch",
"clean": "tsc --build --clean",
"ts-check": "tsc --noEmit",
"start": "node ./dist/server.js",
"test": "vitest run --project=unit",
"integration-test": "vitest run --project=integration --project=integration-isolated",
"test-coverage": "vitest run --coverage",
"dev": "concurrently -p none \"tsx watch --clear-screen=false --inspect ./src/server.ts\" \"tsc --preserveWatchOutput --noEmit --watch\" \"esw src/ -w --ext .ts --cache --color\"",
"docker-db-only": "docker compose --env-file .env -f docker/compose.db-only.yml up",
"docker": "docker compose --env-file .env -f docker/compose.yml up",
"gen-docs": "tsx scripts/openapi.ts dist/static/api/openapi.json && redocly build-docs -o dist/static/api/internal.html internal@v2 && redocly bundle -o dist/static/api/public.json public-filter && redocly build-docs -o dist/static/api/public.html public@v2"
},
"dependencies": {
"@date-fns/utc": "1.2.0",
"@monkeytype/contracts": "workspace:*",
"@monkeytype/funbox": "workspace:*",
"@monkeytype/schemas": "workspace:*",
"@monkeytype/util": "workspace:*",
"@ts-rest/core": "3.52.1",
"@ts-rest/express": "3.52.1",
"@ts-rest/open-api": "3.52.1",
"bcrypt": "5.1.1",
"bullmq": "1.91.1",
"chalk": "4.1.2",
"cors": "2.8.5",
"cron": "2.3.0",
"date-fns": "3.6.0",
"dotenv": "16.4.5",
"etag": "1.8.1",
"express": "5.2.0",
"express-rate-limit": "7.5.1",
"firebase-admin": "12.0.0",
"helmet": "4.6.0",
"ioredis": "4.28.5",
"lru-cache": "7.10.1",
"mjml": "4.15.0",
"mongodb": "6.3.0",
"mustache": "4.2.0",
"nodemailer": "8.0.4",
"object-hash": "3.0.0",
"prom-client": "15.1.3",
"rate-limiter-flexible": "5.0.3",
"simple-git": "3.32.3",
"string-similarity": "4.0.4",
"swagger-stats": "0.99.7",
"ua-parser-js": "0.7.33",
"uuid": "10.0.0",
"winston": "3.6.0",
"zod": "3.23.8"
},
"devDependencies": {
"@monkeytype/oxlint-config": "workspace:*",
"@monkeytype/typescript-config": "workspace:*",
"@redocly/cli": "2.24.1",
"@types/bcrypt": "5.0.2",
"@types/cors": "2.8.12",
"@types/cron": "1.7.3",
"@types/express": "5.0.3",
"@types/ioredis": "4.28.10",
"@types/mjml": "4.7.4",
"@types/mustache": "4.2.2",
"@types/node": "24.9.1",
"@types/nodemailer": "6.4.15",
"@types/object-hash": "3.0.6",
"@types/readline-sync": "1.4.8",
"@types/string-similarity": "4.0.2",
"@types/supertest": "6.0.3",
"@types/swagger-stats": "0.95.11",
"@types/ua-parser-js": "0.7.36",
"@types/uuid": "10.0.0",
"@vitest/coverage-v8": "4.0.15",
"concurrently": "8.2.2",
"openapi3-ts": "2.0.2",
"oxlint": "1.60.0",
"oxlint-tsgolint": "0.21.0",
"readline-sync": "1.4.10",
"supertest": "7.1.4",
"testcontainers": "11.11.0",
"tsx": "4.21.0",
"typescript": "6.0.2",
"vitest": "4.1.0",
"yaml": "2.8.2"
},
"engines": {
"node": ">=24.0.0 <25"
}
}

Some files were not shown because too many files have changed in this diff Show More