feat(monorepo): migrate to typescript monorepo

Commit 075a388ccb (parent 3e09956f05)
Committed by BernardNganduDev on 2025-11-07 17:09:29 +02:00
745 changed files with 2341 additions and 5082 deletions
@@ -0,0 +1,22 @@
import { logger } from "@basango/logger";

import { runSyncCrawl } from "@/process/sync/tasks";
import { CRAWLING_USAGE, parseCrawlingCliArgs } from "@/scripts/utils";

const main = async (): Promise<void> => {
  const options = parseCrawlingCliArgs();

  // --sourceId is required; without it, print the usage text and fail.
  if (options.sourceId === undefined) {
    console.log(CRAWLING_USAGE);
    process.exitCode = 1;
    return;
  }

  try {
    // Run the crawl to completion in this process.
    await runSyncCrawl({ ...options });
  } catch (error) {
    logger.error({ error }, "Synchronous crawl failed");
    process.exitCode = 1;
  }
};

void main();
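Note: both this entrypoint and the asynchronous one below set process.exitCode rather than calling process.exit(), which lets pending async work (such as flushing the logger) finish before the process exits on its own.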
@@ -0,0 +1,24 @@
import { logger } from "@basango/logger";

import { scheduleAsyncCrawl } from "@/process/async/tasks";
import { CRAWLING_USAGE, parseCrawlingCliArgs } from "@/scripts/utils";

const main = async (): Promise<void> => {
  const options = parseCrawlingCliArgs();

  // --sourceId is required; without it, print the usage text and fail.
  if (options.sourceId === undefined) {
    console.log(CRAWLING_USAGE);
    process.exitCode = 1;
    return;
  }

  try {
    // Enqueue the crawl and log the job id so it can be tracked later.
    const id = await scheduleAsyncCrawl({ ...options });
    logger.info({ id, options }, "Scheduled asynchronous crawl job");
  } catch (error) {
    logger.error({ error }, "Failed to schedule crawl job");
    process.exitCode = 1;
  }
};

void main();
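Unlike the synchronous script above, this entrypoint returns as soon as the job is enqueued; the crawl itself runs later in a worker process (see the worker entrypoint at the end of this commit).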
@@ -0,0 +1,39 @@
import { parseArgs } from "node:util";

import type { CrawlingOptions } from "@/process/crawler";

interface WorkerCliOptions {
  queue?: string[];
}

export const CRAWLING_USAGE = `
Usage: bun run crawl:[async|sync] -- --sourceId <id> [options]

Options:
  --pageRange <range>  Optional page range filter (e.g. 1:5)
  --dateRange <range>  Optional date range filter (e.g. 2024-01-01:2024-01-31)
  --category <slug>    Optional category to crawl
  -h, --help           Show this message
`;

// Parse worker CLI flags: --queue/-q may be repeated to select queues.
export const parseWorkerCliArgs = (): WorkerCliOptions => {
  const { values } = parseArgs({
    options: {
      queue: { multiple: true, short: "q", type: "string" },
    },
  });
  return values as WorkerCliOptions;
};

// Parse crawl CLI flags. parseArgs is strict by default, so -h/--help must be
// registered here or it would throw; when it is passed, sourceId stays
// undefined and the callers print CRAWLING_USAGE.
export const parseCrawlingCliArgs = (): CrawlingOptions => {
  const { values } = parseArgs({
    options: {
      category: { type: "string" },
      dateRange: { type: "string" },
      help: { short: "h", type: "boolean" },
      pageRange: { type: "string" },
      sourceId: { type: "string" },
    },
  });
  return values as CrawlingOptions;
};
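For reference, the final cast assumes CrawlingOptions is essentially a bag of optional strings matching the registered flags. A minimal sketch of the shape implied by the parser (hypothetical; the real definition lives in @/process/crawler and may be stricter):

// Hypothetical sketch only: inferred from the flags registered above.
interface CrawlingOptions {
  sourceId?: string;  // required by the crawl entrypoints, optional at parse time
  category?: string;  // category slug to crawl
  pageRange?: string; // e.g. "1:5"
  dateRange?: string; // e.g. "2024-01-01:2024-01-31"
}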
@@ -0,0 +1,35 @@
import { logger } from "@basango/logger";

import { createQueueManager } from "@/process/async/queue";
import { startWorker } from "@/process/async/worker";
import { parseWorkerCliArgs } from "@/scripts/utils";

const main = async (): Promise<void> => {
  const options = parseWorkerCliArgs();
  const manager = createQueueManager();

  // Restrict workers to the queues passed via --queue/-q; default to all queues.
  const queues = options.queue?.length
    ? options.queue.map((name) => manager.queueName(name))
    : undefined;

  const handle = startWorker({
    queueManager: manager,
    queueNames: queues,
  });

  // Drain in-flight jobs before closing the queue connections, then exit.
  const shutdown = async (signal: NodeJS.Signals) => {
    logger.info({ signal }, "Received shutdown signal, draining workers");
    try {
      await handle.close();
    } finally {
      await manager.close();
      process.exit(0);
    }
  };

  process.once("SIGINT", (signal) => void shutdown(signal));
  process.once("SIGTERM", (signal) => void shutdown(signal));

  logger.info({ queueNames: queues }, "Crawler workers started");
};

void main();
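The shutdown path above relies on a small contract from the queue and worker modules. A hedged sketch of what this script assumes (the actual types live in @/process/async/queue and @/process/async/worker and are not shown in this commit):

// Hypothetical sketch only: the minimal surface this script depends on.
interface QueueManager {
  queueName(name: string): string; // map a CLI queue name to its full queue name
  close(): Promise<void>;          // close the underlying queue connections
}
interface WorkerHandle {
  close(): Promise<void>; // stop taking new jobs and drain in-flight work
}
declare function createQueueManager(): QueueManager;
declare function startWorker(options: {
  queueManager: QueueManager;
  queueNames?: string[];
}): WorkerHandle;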