adding monkeytype
Some checks failed
Mark Stale PRs / stale (push) Has been cancelled

This commit is contained in:
Benjamin Falch
2026-04-23 13:53:44 +02:00
parent e214a2fd35
commit 2bc741fb78
1930 changed files with 7590652 additions and 0 deletions

View File

@@ -0,0 +1,7 @@
{
// Lint configuration for the release package (JSONC — oxlint allows comments).
"ignorePatterns": ["node_modules", "dist", ".turbo"],
"extends": [
"../oxlint-config/index.jsonc"
// "@monkeytype/oxlint-config"
]
}

View File

@@ -0,0 +1,21 @@
#!/bin/bash
# Run the backend deploy script on the remote host and mirror its output
# and exit status locally.

# Resolve the directory this script lives in, so relative paths work no
# matter where the caller invokes it from.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Load deployment settings (BE_USER, BE_HOST, BE_SCRIPT_PATH) from the
# .env file one level above this script.
source "$SCRIPT_DIR/../.env"

echo "Running $BE_SCRIPT_PATH on $BE_HOST with user $BE_USER"

# Execute the remote deploy script over SSH, capturing what it printed
# and its exit status.
remote_output=$(ssh "$BE_USER@$BE_HOST" "$BE_SCRIPT_PATH")
remote_status=$?

# Show the remote script's output, then propagate its exit code so the
# caller can tell whether the deploy succeeded.
echo "$remote_output"
exit $remote_status

View File

@@ -0,0 +1,23 @@
#!/bin/bash
# Purge the entire Cloudflare cache for the zone configured in ../.env
# (requires CF_ZONE_ID and CF_API_KEY). Exits non-zero when the purge fails
# so callers (e.g. the release script) can detect the error.

# Determine the directory of the script
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Source the .env file from the parent directory of the script's directory
source "$SCRIPT_DIR/../.env"

echo "Purging Cloudflare cache for zone $CF_ZONE_ID"
response=$(curl -s -X POST "https://api.cloudflare.com/client/v4/zones/$CF_ZONE_ID/purge_cache" \
-H "Authorization: Bearer $CF_API_KEY" \
-H "Content-Type: application/json" \
--data '{"purge_everything":true}')

# [[:space:]] instead of \s: \s in a basic regex is a GNU extension and does
# not work with BSD grep (macOS).
success=$(echo "$response" | grep -o '"success"[[:space:]]*:[[:space:]]*true')
if [ -n "$success" ]; then
echo "Cache purged successfully."
else
echo "Cache purge failed."
echo "Response:"
echo "$response"
# Propagate the failure — the original exited 0 here, hiding the error
# from the calling release script.
exit 1
fi

View File

@@ -0,0 +1,6 @@
CF_ZONE_ID= #cloudflare zone id
CF_API_KEY= #cloudflare api key
BE_HOST= #backend host
BE_USER= #backend user
BE_SCRIPT_PATH= #backend deploy script path

View File

@@ -0,0 +1,26 @@
{
"name": "@monkeytype/release",
"private": true,
"bin": {
"monkeytype-release": "./src/index.js"
},
"type": "module",
"scripts": {
"dev": "nodemon --watch src --exec \"node ./src/index.js --dry\"",
"dev-hotfix": "nodemon --watch src --exec \"node ./src/index.js --dry --hotfix\"",
"dev-changelog": "nodemon ./src/buildChangelog.js",
"lint": "oxlint . --type-aware --type-check",
"lint-fast": "oxlint .",
"purge-cf-cache": "./bin/purgeCfCache.sh"
},
"dependencies": {
"@octokit/rest": "22.0.1",
"dotenv": "16.4.5",
"readline-sync": "1.4.10"
},
"devDependencies": {
"nodemon": "3.1.14",
"oxlint": "1.60.0",
"oxlint-tsgolint": "0.21.0"
}
}

View File

@@ -0,0 +1,399 @@
import { exec } from "child_process";
// const stream = conventionalChangelog(
// {
// preset: {
// name: "conventionalcommits",
// types: [
// { type: "feat", section: "Features" },
// { type: "impr", section: "Improvements" },
// { type: "fix", section: "Fixes" },
// ],
// },
// },
// undefined,
// undefined,
// undefined,
// {
// headerPartial: "",
// }
// );
// let log = "";
// for await (const chunk of stream) {
// log += chunk;
// }
// log = log.replace(/^\*/gm, "-");
// console.log(log);
// console.log(header + log + footer);
//i might come back to the approach below at some point
// Sentinel strings used to parse `git log` output: lineDelimiter marks the
// start of each commit record, logDelimiter separates fields within a record.
// Both are chosen so they can never appear in a real commit message.
const lineDelimiter =
"thisismylinedelimiterthatwilldefinitelynotappearintheactualcommitmessage";
const logDelimiter =
"thisismylogdelimiterthatwilldefinitelynotappearintheactualcommitmessage";
// Returns the raw `git log` output for every commit since the last tag:
// each commit record is prefixed with lineDelimiter and its fields
// (full hash, short hash, subject, body) are separated by logDelimiter.
async function getLog() {
  // Promisified child_process.exec: resolves with stdout, rejects on error.
  function execPromise(command) {
    return new Promise((resolve, reject) => {
      exec(command, (err, stdout, _stderr) => {
        if (err) {
          reject(err);
        } else {
          resolve(stdout);
        }
      });
    });
  }
  // Most recent tag reachable from HEAD^ (skips a tag on HEAD itself).
  const lastTag = await execPromise(`git describe --tags --abbrev=0 HEAD^`);
  // Idiom fix: the original mixed an async function with a .then() chain.
  return execPromise(
    `git log --oneline ${lastTag.trim()}..HEAD --pretty="format:${lineDelimiter}%H${logDelimiter}%h${logDelimiter}%s${logDelimiter}%b"`,
  );
}
// True for `impr` commits whose scope mentions "quote" and whose message
// starts with "add" and ends with "quote"/"quotes" — i.e. commits adding
// quotes, which get collapsed into a single changelog entry.
function itemIsAddingQuotes(item) {
  if (item.type !== "impr") return false;
  if (!item.scope?.includes("quote")) return false;
  if (!item.message.startsWith("add")) return false;
  return item.message.endsWith("quote") || item.message.endsWith("quotes");
}
// True for commits whose scope mentions "quote" and whose message mentions
// "report" — i.e. commits addressing user quote reports, which get collapsed
// into a single changelog entry.
// Simplified: includes("quote") already matches "quotes" and
// includes("report") already matches "reports", so the plural checks in the
// original were redundant.
function itemIsAddressingQuoteReports(item) {
  const scopeIsQuote = item.scope?.includes("quote") === true;
  const messageReport = item.message.includes("report");
  return scopeIsQuote && messageReport;
}
// Commit types that get their own user-facing changelog section, mapped to
// the section heading. Key iteration order here is the section order in the
// generated changelog.
const titles = {
feat: "Features",
impr: "Improvements",
fix: "Fixes",
};
// Render a PR reference like "#123" as a markdown link to the PR on GitHub.
function getPrLink(pr) {
  const number = pr.replace("#", "");
  return `[#${number}](https://github.com/monkeytypegame/monkeytype/pull/${number})`;
}
// Render a commit as a markdown link: short hash as the text, full hash in
// the URL.
function getCommitLink(hash, longHash) {
  const url = `https://github.com/monkeytypegame/monkeytype/commit/${longHash}`;
  return `[${hash}](${url})`;
}
// Render changelog entries as markdown bullet lines:
// "- **scope:** message (users) (PR links) (commit links)".
// With mergeTypeAndScope the prefix becomes "**type(scope):**", used for the
// footer where entries of many types are mixed together.
function buildItems(items, mergeTypeAndScope = false) {
  const lines = items.map((item) => {
    let prefix = item.scope ? `**${item.scope}:** ` : "";
    if (mergeTypeAndScope) {
      prefix = `**${item.type}${item.scope ? `(${item.scope})` : ""}:** `;
    }
    const credits =
      item.usernames.length > 0 ? ` (${item.usernames.join(", ")})` : "";
    const prLinks =
      item.prs.length > 0
        ? ` (${item.prs.map((p) => getPrLink(p)).join(", ")})`
        : "";
    const commitLinks = ` (${item.hashes
      .map((h) => getCommitLink(h.short, h.full))
      .join(", ")})`;
    return `- ${prefix}${item.message}${credits}${prLinks}${commitLinks}\n`;
  });
  return lines.join("");
}
// Render one changelog section ("### Heading" + bullet list) for a commit
// type. Commits flagged "!nuf" (not user facing) are excluded here — they are
// rendered in the footer instead. Returns "" when the section would be empty.
function buildSection(type, allItems) {
  const visible = allItems.filter(
    (item) => item.type === type && !item.body.includes("!nuf"),
  );
  if (visible.length === 0) {
    return "";
  }
  return `### ${titles[type]}\n\n` + buildItems(visible);
}
// Render the "Nerd stuff" footer: user-facing types explicitly flagged "!nuf"
// plus all inherently non-user-facing types, deduplicated by leading commit
// hash, in a fixed type order.
function buildFooter(logs) {
  let out =
    "\n### Nerd stuff\n\nThese changes will not be visible to users, but are included for completeness and to credit contributors.\n\n";
  // User-facing types only land here when the commit body carries "!nuf".
  const flaggedTypes = ["feat", "impr", "fix"];
  // These types are never user facing, so they always land in the footer.
  const otherTypes = [
    "style",
    "docs",
    "refactor",
    "perf",
    "ci",
    "test",
    "build",
    "chore",
  ];
  const collected = [
    ...flaggedTypes.flatMap((type) =>
      logs.filter((item) => item.type === type && item.body.includes("!nuf")),
    ),
    ...otherTypes.flatMap((type) =>
      logs.filter((item) => item.type === type),
    ),
  ];
  // Drop duplicates (same leading commit hash), keeping the first occurrence.
  const seen = new Set();
  const unique = collected.filter((item) => {
    const full = item.hashes[0].full;
    if (seen.has(full)) return false;
    seen.add(full);
    return true;
  });
  out += buildItems(unique, true);
  return out;
}
// function buildFooter(logs) {
// const styleLogs = logs.filter((item) => item.type === "style");
// const docLogs = logs.filter((item) => item.type === "docs");
// const refactorLogs = logs.filter((item) => item.type === "refactor");
// const perfLogs = logs.filter((item) => item.type === "perf");
// const ciLogs = logs.filter((item) => item.type === "ci");
// const testLogs = logs.filter((item) => item.type === "test");
// const buildLogs = logs.filter((item) => item.type === "build");
// const otherStrings = [];
// if (styleLogs.length > 0) {
// otherStrings.push("style");
// }
// if (docLogs.length > 0) {
// otherStrings.push("documentation");
// }
// if (refactorLogs.length > 0) {
// otherStrings.push("refactoring");
// }
// if (perfLogs.length > 0) {
// otherStrings.push("performance");
// }
// if (ciLogs.length > 0) {
// otherStrings.push("CI");
// }
// if (testLogs.length > 0) {
// otherStrings.push("testing");
// }
// if (buildLogs.length > 0) {
// otherStrings.push("build");
// }
// if (otherStrings.length === 0) {
// return "";
// }
// //build a string where otherStrings are joined by commas and the last one is joined by "and"
// const finalString =
// otherStrings.length > 1
// ? otherStrings.slice(0, -1).join(", ") + " and " + otherStrings.slice(-1)
// : otherStrings[0];
// return `\n### Other\n\n- Various ${finalString} changes`;
// }
// Parse the raw git log (already split on lineDelimiter) into structured
// entries: { hashes, type, scope, message, usernames, prs, body }.
// Records that don't match the conventional-commit format are skipped.
function convertStringToLog(logString) {
  const log = [];
  for (const line of logString) {
    if (line === "" || line === "\r" || line === "\n") continue;
    // Fields within a record are separated by logDelimiter:
    // <full hash><logDelimiter><short hash><logDelimiter><subject><logDelimiter><body>
    const [hash, shortHash, title, body] = line
      .split(logDelimiter)
      .map((s) => s.trim());
    // Robustness fix: a malformed record (e.g. an element that is only
    // whitespace like "\r\n") yields fewer than four fields, and the original
    // crashed with a TypeError on title.split below. Skip such records.
    if (title === undefined) continue;
    // Parse "type(scope): message (@user) (#pr)" — scope, username and PR
    // number are all optional.
    const [_, type, scope, message, username, pr] = title.split(
      /^(\w+)(?:\(([^)]+)\))?:\s+(.+?)(?:\s*\((@[^)]+)\))?(?:\s+\((#[^)]+)\))?$/,
    );
    // Multiple usernames / PRs can be listed comma-separated.
    const usernames = username ? username.split(", ") : [];
    const prs = pr ? pr.split(", ") : [];
    if (type && message) {
      log.push({
        hashes: [
          {
            short: shortHash,
            full: hash,
          },
        ],
        type,
        scope,
        message,
        usernames,
        prs,
        body: body ?? "",
      });
    }
  }
  return log;
}
// Printed above the changelog when at least one outside contributor is
// credited.
const header =
  "Thank you to all the contributors who made this release possible!";

// Entry point: build the changelog from all commits since the last tag and
// print it to stdout (the release script captures this output).
async function main() {
  const rawLog = await getLog();
  let log = convertStringToLog(rawLog.split(lineDelimiter));

  // Number of (non-unique) username credits, excluding bot accounts and the
  // maintainer — used to decide whether to print the thank-you header.
  const ignored = new Set(["monkeytype-bot", "dependabot", "miodec"]);
  const contributorCount = log
    .flatMap((entry) => entry.usernames)
    .filter((name) => !ignored.has(name.toLowerCase())).length;

  // Collapse all quote-adding commits into a single changelog entry.
  const quoteAdds = log.filter((item) => itemIsAddingQuotes(item));
  log = log.filter((item) => !itemIsAddingQuotes(item));
  // Collapse all quote-report commits into a single changelog entry.
  const quoteReports = log.filter((item) =>
    itemIsAddressingQuoteReports(item),
  );
  log = log.filter((item) => !itemIsAddressingQuoteReports(item));

  if (quoteAdds.length > 0) {
    log.push({
      hashes: quoteAdds.flatMap((item) => item.hashes),
      type: "impr",
      scope: "quotes",
      message: "add quotes in various languages",
      usernames: quoteAdds.flatMap((item) => item.usernames),
      prs: quoteAdds.flatMap((item) => item.prs),
      body: "",
    });
  }
  if (quoteReports.length > 0) {
    log.push({
      hashes: quoteReports.flatMap((item) => item.hashes),
      type: "fix",
      scope: "quotes",
      message: "update or remove quotes reported by users",
      usernames: quoteReports.flatMap((item) => item.usernames),
      prs: quoteReports.flatMap((item) => item.prs),
      body: "",
    });
  }

  let final = "";
  if (contributorCount > 0) {
    final += header + "\n\n\n";
  }
  // One section per user-facing type, in the order defined by `titles`.
  const sections = Object.keys(titles)
    .map((type) => buildSection(type, log))
    .filter((section) => section !== "");
  final += sections.join("\n\n");
  const footer = buildFooter(log);
  if (footer) {
    final += "\n" + footer;
  }
  console.log(final);
}
main();

View File

@@ -0,0 +1,60 @@
import dotenv from "dotenv";
dotenv.config();
// GitHub repository to pull contributor stats from.
const OWNER = "monkeytypegame";
const REPO = "monkeytype";
// Accounts (lowercased) excluded from the public contributors list.
const EXCLUDED = new Set(["monkeytypegeorge", "miodec"]);
// Fetch one page (100 entries, anonymous contributors included) of
// contributors from the GitHub API. Uses GITHUB_TOKEN for auth when set
// (unauthenticated requests are heavily rate limited).
// Throws on HTTP errors instead of returning the error payload.
async function getContributors(page) {
  console.log("Getting contributors from page " + page);
  const res = await fetch(
    `https://api.github.com/repos/${OWNER}/${REPO}/contributors?anon=1&per_page=100&page=${page}`,
    {
      method: "GET",
      headers: {
        "User-Agent": "monkeytypegame release script",
        ...(process.env.GITHUB_TOKEN && {
          Authorization: `token ${process.env.GITHUB_TOKEN}`,
        }),
      },
    },
  );
  // Robustness fix: without this check an error payload (a plain object)
  // flowed into main() and crashed confusingly on data.map.
  if (!res.ok) {
    throw new Error(
      `GitHub API request failed: ${res.status} ${res.statusText}`,
    );
  }
  return res.json();
}
// Collect every contributor page, drop excluded/bot accounts, sort by
// contribution count, dedupe by name, and print the names as a JSON array
// (consumed by the release script).
async function main() {
  const all = [];
  let page = 1;
  // Page through the API until an empty page is returned.
  while (true) {
    const data = await getContributors(page);
    const pageContributors = data.map((c) => ({
      // Anonymous contributors have no login, only a name.
      name: c.login ?? c.name,
      contributions: c.contributions,
    }));
    all.push(...pageContributors);
    if (pageContributors.length === 0) break;
    page++;
  }
  const ranked = all
    .filter(
      (c) => !EXCLUDED.has(c.name?.toLowerCase()) && !c.name?.includes("[bot]"),
    )
    .sort((a, b) => b.contributions - a.contributions);
  // Keep only the first (highest-ranked) entry per name.
  const seen = new Set();
  const unique = ranked.filter((c) => {
    if (seen.has(c.name)) return false;
    seen.add(c.name);
    return true;
  });
  console.log(JSON.stringify(unique.map((c) => c.name)));
}
main();

376
packages/release/src/index.js Executable file
View File

@@ -0,0 +1,376 @@
import { Octokit } from "@octokit/rest";
import { execSync } from "child_process";
import dotenv from "dotenv";
import fs, { readFileSync } from "fs";
import readlineSync from "readline-sync";
import path, { dirname } from "path";
import { fileURLToPath } from "url";
// ES-module equivalents of CommonJS __filename/__dirname.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
dotenv.config();
// CLI flags controlling what gets built/deployed and how.
const args = new Set(process.argv.slice(2));
const isFrontend = args.has("--fe"); // release the frontend only
const noDeploy = args.has("--no-deploy"); // build/tag but skip deploy steps
const isBackend = args.has("--be"); // release the backend only
const isDryRun = args.has("--dry"); // log commands instead of running them
const noSyncCheck = args.has("--no-sync-check"); // skip master/origin sync check
const hotfix = args.has("--hotfix"); // deploy without version bump/changelog
const previewFe = args.has("--preview-fe"); // deploy a firebase preview channel
// Repository root (this file lives in packages/release/src).
const PROJECT_ROOT = path.resolve(__dirname, "../../../");
// Run a shell command and return its stdout. In dry-run mode (unless `force`)
// the command is only logged. On failure the command's output is printed and
// the process exits with code 1.
const runCommand = (command, force) => {
  if (isDryRun && !force) {
    console.log(`[Dry Run] Command: ${command}`);
    return "[Dry Run] Command executed.";
  }
  try {
    return execSync(command, { stdio: "pipe" }).toString();
  } catch (error) {
    console.error(`Error executing command ${command}`);
    console.error(error.output.toString());
    process.exit(1);
  }
  // (The original had an unreachable `return undefined` here — every path
  // above either returns or exits.)
};
// Like runCommand, but executes from the repository root. Returns stdout,
// logs-only in dry-run mode (unless `force`), exits the process on failure.
const runProjectRootCommand = (command, force) => {
  if (isDryRun && !force) {
    console.log(`[Dry Run] Command: ${command}`);
    return "[Dry Run] Command executed.";
  }
  try {
    return execSync(`cd ${PROJECT_ROOT} && ${command}`, {
      stdio: "pipe",
    }).toString();
  } catch (error) {
    console.error(`Error executing command ${command}`);
    console.error(error);
    process.exit(1);
  }
  // (The original had an unreachable `return undefined` here — every path
  // above either returns or exits.)
};
// Ensure we're on the local master branch and that it is in sync with
// origin/master; exits the process on any mismatch. Honors --no-sync-check
// (skips the sync comparison) and --dry (skips both git checks).
const checkBranchSync = () => {
  console.log("Checking if local branch is master...");
  if (isDryRun) {
    // Bug fix: in dry-run mode runProjectRootCommand returns the placeholder
    // "[Dry Run] Command executed.", which is never "master", so the original
    // check made every dry run exit 1 here. Skip the check instead.
    console.log("[Dry Run] Checking current branch...");
  } else {
    const currentBranch = runProjectRootCommand(
      "git branch --show-current",
    ).trim();
    if (currentBranch !== "master") {
      console.error(
        "Local branch is not master. Please checkout the master branch.",
      );
      process.exit(1);
    }
  }
  console.log("Checking if local master branch is in sync with origin...");
  if (noSyncCheck) {
    console.log("Skipping sync check.");
  } else if (isDryRun) {
    console.log("[Dry Run] Checking sync...");
  } else {
    try {
      // Fetch the latest changes from the remote repository
      runProjectRootCommand("git fetch origin");
      // Get the commit hashes of the local and remote master branches
      const localMaster = runProjectRootCommand("git rev-parse master").trim();
      const remoteMaster = runProjectRootCommand(
        "git rev-parse origin/master",
      ).trim();
      if (localMaster !== remoteMaster) {
        console.error(
          "Local master branch is not in sync with origin. Please pull the latest changes before proceeding.",
        );
        process.exit(1);
      }
    } catch (error) {
      console.error("Error checking branch sync status.");
      console.error(error);
      process.exit(1);
    }
  }
};
// Read the current version string from the repo root package.json.
const getCurrentVersion = () => {
  console.log("Getting current version...");
  const pkg = JSON.parse(
    readFileSync(`${PROJECT_ROOT}/package.json`, "utf-8"),
  );
  return pkg.version;
};
// Compute the next calver-style version "vYY.WW.N": two-digit year, week of
// year, incrementing patch counter. The patch counter resets to 0 whenever
// the year or week differs from the previous version.
// `now` is injectable for testing; defaults to the current time, so the
// change is backward compatible.
const incrementVersion = (currentVersion, now = new Date()) => {
  console.log("Incrementing version...");
  const year = Number(now.getFullYear().toString().slice(-2));
  const start = new Date(now.getFullYear(), 0, 1);
  // Week number: days elapsed since Jan 1, offset by Jan 1's weekday, in 7s.
  const week = Math.ceil(((now - start) / 86400000 + start.getDay() + 1) / 7);
  const [prevYear, prevWeek, minor] = currentVersion.split(".").map(Number);
  // Same week of the same year → bump the patch counter; otherwise restart.
  const newMinor = year === prevYear && week === prevWeek ? minor + 1 : 0;
  return `v${year}.${week}.${newMinor}`;
};
// Write the new version (without the leading "v") into the root package.json.
// In dry-run mode only logs what would happen.
const updatePackage = (newVersion) => {
  console.log("Updating package.json...");
  if (isDryRun) {
    console.log(`[Dry Run] Updated package.json to version ${newVersion}`);
    return;
  }
  const pkgPath = `${PROJECT_ROOT}/package.json`;
  const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf8"));
  pkg.version = newVersion.replace("v", "");
  // Keep the trailing newline that package managers expect.
  fs.writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + "\n", "utf8");
  console.log(`Updated package.json to version ${newVersion}`);
};
// Abort the release if the working tree has uncommitted changes.
// In dry-run mode only logs — and, unlike the original, skips running git
// entirely (the original executed `git status` before checking isDryRun,
// which also threw when run outside a git repository in dry mode).
const checkUncommittedChanges = () => {
  console.log("Checking uncommitted changes...");
  if (isDryRun) {
    console.log("[Dry Run] Checking uncommitted changes...");
    return;
  }
  // NOTE(review): runs in the current working directory, not PROJECT_ROOT —
  // matches the original behavior; confirm the script is always invoked from
  // inside the repo.
  const status = execSync("git status --porcelain").toString().trim();
  if (status) {
    console.error(
      "You have uncommitted changes. Please commit or stash them before proceeding.",
    );
    process.exit(1);
  }
};
// Install workspace dependencies via pnpm (skipped in dry-run mode).
const installDependencies = () => {
  console.log("Installing dependencies...");
  if (isDryRun) {
    console.log("[Dry Run] Dependencies would be installed.");
    return;
  }
  runProjectRootCommand("pnpm i");
};
// Lint, test and build the selected workspace(s) via turbo in production
// mode. With no --fe/--be flag (or both), everything is built.
const buildProject = () => {
  console.log("Building project...");
  const frontendOnly = isFrontend && !isBackend;
  const backendOnly = isBackend && !isFrontend;
  let command =
    "NODE_ENV=production SENTRY=1 npx turbo lint test check-assets build --force";
  if (frontendOnly) {
    command =
      "NODE_ENV=production SENTRY=1 npx turbo lint test check-assets build --filter @monkeytype/frontend --force";
  } else if (backendOnly) {
    // The backend has no static assets, so check-assets is omitted.
    command =
      "NODE_ENV=production SENTRY=1 npx turbo lint test build --filter @monkeytype/backend --force";
  }
  runProjectRootCommand(command);
};
// Run the backend deploy script on the remote host (see bin/deployBackend.sh).
const deployBackend = () => {
  console.log("Deploying backend...");
  const p = path.resolve(__dirname, "../bin/deployBackend.sh");
  // The script uses bash-only features (BASH_SOURCE), so it must be run with
  // bash — invoking it via `sh` breaks on systems where sh is dash.
  runCommand(`bash ${p}`);
};
// Deploy the built frontend to firebase hosting (live project).
const deployFrontend = () => {
  console.log("Deploying frontend...");
  const command = "cd frontend && npx firebase deploy -P live --only hosting";
  runProjectRootCommand(command);
};
// Purge the Cloudflare cache so users get the freshly deployed assets
// (see bin/purgeCfCache.sh).
const purgeCache = () => {
  console.log("Purging Cloudflare cache...");
  const p = path.resolve(__dirname, "../bin/purgeCfCache.sh");
  // The script uses bash-only features (BASH_SOURCE), so it must be run with
  // bash — invoking it via `sh` breaks on systems where sh is dash.
  runCommand(`bash ${p}`);
};
// Build the changelog by running buildChangelog.js and capturing its stdout.
// Always executes (force=true) so the changelog is produced even in dry runs.
const generateChangelog = async () => {
  console.log("Generating changelog...");
  const script = path.resolve(__dirname, "./buildChangelog.js");
  return runCommand(`node ${script}`, true);
};
// Regenerate frontend/static/contributors.json from the GitHub API. Always
// executes (force=true). Failures are logged but do not abort the release.
const generateContributors = () => {
  console.log("Generating contributors list...");
  try {
    const script = path.resolve(__dirname, "./buildContributors.js");
    const raw = runCommand(`node ${script}`, true);
    // The script may log progress lines before the JSON array; strip
    // everything up to the first "[".
    const contributors = JSON.parse(
      raw.replaceAll("\n", "").replace(/^.*?\[/, "["),
    );
    fs.writeFileSync(
      `${PROJECT_ROOT}/frontend/static/contributors.json`,
      JSON.stringify(contributors, null, 2),
      "utf8",
    );
    console.log("Contributors list updated.");
  } catch (e) {
    console.error("Failed to generate contributors list.");
    console.error(e);
  }
};
// Commit the release changes, tag the version, and push commit + tags to
// origin (hooks skipped via --no-verify).
const createCommitAndTag = (version) => {
  console.log("Creating commit and tag... Pushing to Github...");
  const commands = [
    `git add .`,
    `git commit -m "chore: release ${version}" --no-verify`,
    `git tag ${version}`,
    `git push origin master --tags --no-verify`,
  ];
  for (const command of commands) {
    runCommand(command);
  }
};
// Create a GitHub release for the tagged version, using the changelog as the
// release body. In dry-run mode only logs.
const createGithubRelease = async (version, changelogContent) => {
  console.log("Creating GitHub release...");
  if (isDryRun) {
    console.log(
      `[Dry Run] Sent release request to GitHub for version ${version}`,
    );
    return;
  }
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  await octokit.repos.createRelease({
    owner: "monkeytypegame",
    repo: "monkeytype",
    tag_name: version,
    name: `${version}`,
    body: changelogContent,
  });
};
// Orchestrates the full release: optional preview deploy, sanity checks,
// changelog generation, build, deploy (backend/frontend/cache), version bump,
// git tag and GitHub release. Behavior is driven by the CLI flags parsed
// at module load.
const main = async () => {
  // --preview-fe: build the frontend and deploy it to a temporary firebase
  // preview channel, then exit — no versioning or production deploy.
  if (previewFe) {
    console.log(`Starting frontend preview deployment process...`);
    installDependencies();
    runProjectRootCommand(
      "NODE_ENV=production npx turbo lint test check-assets build --filter @monkeytype/frontend --force",
    );
    const name = readlineSync.question(
      "Enter preview channel name (default: preview): ",
    );
    let channelName = name.trim();
    if (channelName === "") {
      channelName = "preview";
    }
    const expirationTime = readlineSync.question(
      "Enter expiration time (e.g., 2h, default: 1d): ",
    );
    let expires = expirationTime.trim();
    if (expires === "") {
      expires = "1d";
    }
    console.log(
      `Deploying frontend preview to channel "${channelName}" with expiration "${expires}"...`,
    );
    const result = runProjectRootCommand(
      `cd frontend && npx firebase hosting:channel:deploy ${channelName} -P live --expires ${expires}`,
    );
    console.log(result);
    console.log("Frontend preview deployed successfully.");
    process.exit(0);
  }
  console.log(`Starting ${hotfix ? "hotfix" : "release"} process...`);
  // Hotfixes skip the branch/sync check so they can ship from any state.
  if (!hotfix) checkBranchSync();
  checkUncommittedChanges();
  installDependencies();
  let changelogContent;
  let newVersion;
  // Hotfixes reuse the current version: no changelog and no version bump.
  if (!hotfix) {
    changelogContent = await generateChangelog();
    console.log(changelogContent);
    if (!readlineSync.keyInYN("Changelog looks good?")) {
      console.log("Exiting.");
      process.exit(1);
    }
    const currentVersion = getCurrentVersion();
    newVersion = incrementVersion(currentVersion);
    console.log(`New version: ${newVersion}`);
  }
  buildProject();
  // Last confirmation before anything irreversible happens.
  if (!hotfix && !readlineSync.keyInYN(`Ready to release ${newVersion}?`)) {
    console.log("Exiting.");
    process.exit(1);
  }
  // With no --fe/--be flag, both backend and frontend are deployed.
  if (!noDeploy && (isBackend || (!isFrontend && !isBackend))) {
    deployBackend();
  }
  if (!noDeploy && (isFrontend || (!isFrontend && !isBackend))) {
    deployFrontend();
  }
  if (!noDeploy) purgeCache();
  // Post-deploy bookkeeping (skipped for hotfixes): contributors file,
  // version bump, tag + push, GitHub release.
  if (!hotfix) {
    generateContributors();
    updatePackage(newVersion);
    createCommitAndTag(newVersion);
    // A failed GitHub release is not fatal — it can be created manually.
    try {
      await createGithubRelease(newVersion, changelogContent);
    } catch (e) {
      console.error(`Failed to create release on GitHub: ${e}`);
      console.log("Please create the release manually.");
    }
  }
  if (hotfix) {
    console.log("Hotfix completed successfully.");
  } else {
    console.log(`Release ${newVersion} completed successfully.`);
  }
  process.exit(0);
};
main();