feat(monorepo): migrate to typescript monorepo

This commit is contained in:
2025-11-07 17:09:29 +02:00
committed by BernardNganduDev
parent 3e09956f05
commit 075a388ccb
745 changed files with 2341 additions and 5082 deletions
+55
View File
@@ -0,0 +1,55 @@
import { drizzle } from "drizzle-orm/node-postgres";
import { Pool } from "pg";
import * as schema from "@/schema";
// Pool sizing differs per environment: development keeps the pool small and
// reaps idle clients quickly; production allows more concurrency.
const isDevelopment = process.env.NODE_ENV === "development";
const connectionConfig = {
  // Maximum number of clients held by the pool.
  max: isDevelopment ? 8 : 12,
  // Idle clients are destroyed after 5s in dev, 60s in production.
  idleTimeoutMillis: isDevelopment ? 5_000 : 60_000,
  // Fail fast when a connection cannot be established within 15s.
  connectionTimeoutMillis: 15_000,
  // NOTE(review): pg-pool treats a falsy maxUses as "unlimited", so the 0 used
  // in production disables client recycling entirely — confirm intentional.
  maxUses: isDevelopment ? 100 : 0,
  // Allow the process to exit naturally once all clients are idle.
  allowExitOnIdle: true,
};
const pool = new Pool({
  // DATABASE_URL wins; DATABASE_PRIMARY_URL is the fallback. The non-null
  // assertion means a missing fallback only surfaces at connect time.
  connectionString: process.env.DATABASE_URL ?? process.env.DATABASE_PRIMARY_URL!,
  ...connectionConfig,
});
// Lightweight connection pool monitoring (single pool)
export const getConnectionPoolStats = () => {
  // Derive active from total minus idle; clamp at zero to guard against the
  // counters being read mid-update.
  const idleCount = pool.idleCount ?? 0;
  const activeCount = Math.max(0, (pool.totalCount ?? 0) - idleCount);
  const primary = {
    name: "primary",
    total: pool.options.max ?? 0,
    idle: idleCount,
    active: activeCount,
    waiting: pool.waitingCount ?? 0,
    ended: (pool as any).ended ?? false,
  };
  const capacity = connectionConfig.max;
  const utilizationPercent = capacity > 0 ? Math.round((primary.active / capacity) * 100) : 0;
  return {
    timestamp: new Date().toISOString(),
    region: process.env.FLY_REGION || "unknown",
    instance: process.env.FLY_ALLOC_ID || "local",
    pools: { primary },
    summary: {
      totalConnections: capacity,
      totalActive: primary.active,
      totalWaiting: primary.waiting,
      // Exhausted when every slot is busy or clients are queued for one.
      hasExhaustedPools: primary.active >= capacity || (primary.waiting ?? 0) > 0,
      utilizationPercent,
    },
  };
};
// Drizzle instance bound to the shared pool; `casing` maps camelCase TS fields
// to snake_case SQL identifiers.
export const db = drizzle(pool, {
  schema,
  casing: "snake_case",
});
// Kept async for call sites that expect a connect step; resolves immediately.
export const connectDb = async () => db;
// Canonical database handle type used by all query modules.
export type Database = Awaited<ReturnType<typeof connectDb>>;
+2
View File
@@ -0,0 +1,2 @@
// Base URL for source logo images; the file name is `<source_name>.png`.
export const SOURCE_IMAGE_BASE = "https://devscast.org/images/sources/";
// Number of days of history rendered in a source's publication graph.
export const PUBLICATION_GRAPH_DAYS = 180;
+547
View File
@@ -0,0 +1,547 @@
import type { AnyColumn, SQL } from "drizzle-orm";
import { and, asc, desc, eq, gt, lt, or, sql } from "drizzle-orm";
import type { Database } from "@/client";
import { articles, bookmarkArticles, bookmarks, comments, sources, users } from "@/schema";
import {
buildPaginationResult,
createPageState,
decodeCursor,
type PageRequest,
type PageState,
type PaginationMeta,
type SortDirection,
} from "@/utils/pagination";
/** Optional filters applied to article list queries. */
export interface ArticleFilters {
  search?: string | null;
  category?: string | null;
  dateRange?: { start: number; end: number } | null;
  sortDirection?: SortDirection;
}
/** One row of an article overview list (card data plus joined source info). */
export interface ArticleOverviewRow {
  article_id: string;
  articleTitle: string;
  articleLink: string;
  articleCategories: string | null;
  article_excerpt: string | null;
  article_published_at: string;
  article_image: string | null;
  article_reading_time: number | null;
  sourceId: string;
  source_display_name: string | null;
  source_image: string;
  sourceUrl: string;
  source_name: string;
  source_created_at: string;
  article_is_bookmarked: boolean;
}
/** Page of overview rows plus pagination metadata. */
export interface ArticleOverviewResult {
  data: ArticleOverviewRow[];
  pagination: PaginationMeta;
}
/** Full article detail row including scoring fields and the joined source. */
export interface ArticleDetailsRow {
  article_id: string;
  articleTitle: string;
  articleLink: string;
  articleCategories: string | null;
  articleBody: string;
  article_hash: string;
  article_published_at: string;
  article_crawled_at: string;
  article_updated_at: string | null;
  article_bias: string;
  article_reliability: string;
  article_transparency: string;
  article_sentiment: string;
  article_metadata: unknown;
  article_reading_time: number | null;
  sourceId: string;
  source_name: string;
  source_description: string | null;
  sourceUrl: string;
  source_updated_at: string | null;
  source_display_name: string | null;
  source_bias: string;
  source_reliability: string;
  source_transparency: string;
  source_image: string;
  article_is_bookmarked: boolean;
}
/** One comment row joined with its author. */
export interface ArticleCommentRow {
  comment_id: string;
  comment_content: string;
  comment_sentiment: string;
  comment_created_at: string;
  user_id: string;
  user_name: string;
}
/** Internal shape of ArticleFilters after trimming and defaulting. */
interface NormalizedArticleFilters {
  search?: string;
  category?: string;
  dateRange?: { start: number; end: number } | null;
  sortDirection: SortDirection;
}
/** Flat row emitted by the article export stream. */
export interface ArticleExportRow {
  articleId: string;
  articleTitle: string;
  articleLink: string;
  articleCategories: string | null;
  articleBody: string;
  articleSource: string;
  articleHash: string;
  articlePublishedAt: string;
  articleCrawledAt: string;
}
/** Options for getArticlesForExport. */
export interface ArticleExportParams {
  source?: string | null;
  dateRange?: { start: number; end: number } | null;
  // Rows fetched per round trip; defaults to 1000 when unset or non-positive.
  batchSize?: number;
}
/**
 * Streams articles (joined with their source name) in batches for export.
 *
 * Rows are ordered newest-first and fetched with limit/offset paging;
 * `batchSize` controls how many rows are pulled per round trip (default 1000).
 */
export async function* getArticlesForExport(
  db: Database,
  params: ArticleExportParams = {},
): AsyncGenerator<ArticleExportRow> {
  const pageSize = params.batchSize && params.batchSize > 0 ? params.batchSize : 1000;
  const conditions: SQL[] = [];
  if (params.source) {
    conditions.push(eq(sources.name, params.source));
  }
  if (params.dateRange) {
    conditions.push(
      sql`${articles.publishedAt} BETWEEN to_timestamp(${params.dateRange.start}) AND to_timestamp(${params.dateRange.end})`,
    );
  }
  let query = db
    .select({
      articleId: articles.id,
      articleTitle: articles.title,
      articleLink: articles.link,
      articleCategories: sql<string | null>`array_to_string(${articles.categories}, ',')`,
      articleBody: articles.body,
      articleSource: sources.name,
      articleHash: articles.hash,
      articlePublishedAt: articles.publishedAt,
      articleCrawledAt: articles.crawledAt,
    })
    .from(articles)
    .innerJoin(sources, eq(articles.sourceId, sources.id));
  if (conditions.length > 0) {
    query = query.where(conditions.length === 1 ? conditions[0] : and(...conditions));
  }
  query = query.orderBy(desc(articles.publishedAt), desc(articles.id));
  // Page through until a fetch comes back empty.
  for (let offset = 0; ; offset += pageSize) {
    const batch = await query.limit(pageSize).offset(offset);
    if (batch.length === 0) {
      return;
    }
    for (const row of batch) {
      yield { ...row, articleCategories: row.articleCategories ?? null };
    }
  }
}
const SOURCE_IMAGE_BASE = "https://devscast.org/images/sources/";
/**
 * Trims free-text filters, converts blank values to undefined, and applies
 * defaults (no date range, descending sort).
 */
function normalizeArticleFilters(filters?: ArticleFilters): NormalizedArticleFilters {
  // Blank or whitespace-only strings collapse to undefined.
  const toOptional = (value?: string | null): string | undefined => {
    const trimmed = value?.trim();
    return trimmed ? trimmed : undefined;
  };
  return {
    search: toOptional(filters?.search),
    category: toOptional(filters?.category),
    dateRange: filters?.dateRange ?? null,
    sortDirection: filters?.sortDirection ?? "desc",
  };
}
/**
 * Translates normalized filters into SQL conditions for article queries.
 *
 * Returns the conditions plus the sanitized search string (when present) so
 * callers can reuse the exact same tsquery for ts_rank ordering.
 */
function buildArticleFilterConditions(filters: NormalizedArticleFilters): {
  conditions: SQL[];
  searchQuery?: string;
} {
  const conditions: SQL[] = [];
  let searchQuery: string | undefined;
  if (filters.category) {
    conditions.push(sql`${filters.category} = ANY(${articles.categories})`);
  }
  if (filters.search) {
    // to_tsquery() raises a syntax error on operators such as & | ! ( ) : *
    // or unbalanced quotes, so strip tsquery metacharacters from the
    // user-supplied text before AND-joining the remaining words. (The value is
    // already bound as a parameter, so this is about runtime errors, not
    // injection.)
    const sanitized = filters.search
      .replace(/[&|!():*<>'"\\]/g, " ")
      .trim()
      .replace(/\s+/g, " & ");
    if (sanitized.length > 0) {
      searchQuery = sanitized;
      conditions.push(sql`${articles.tsv} @@ to_tsquery('french', ${sanitized})`);
    }
  }
  if (filters.dateRange) {
    conditions.push(
      sql`${articles.publishedAt} BETWEEN to_timestamp(${filters.dateRange.start}) AND to_timestamp(${filters.dateRange.end})`,
    );
  }
  return { conditions, searchQuery };
}
// Correlated EXISTS subquery: true when the current article row appears in any
// bookmark owned by `userId`.
function buildBookmarkExistsExpression(userId: string): SQL<boolean> {
  return sql`EXISTS
  (SELECT 1
  FROM ${bookmarkArticles} ba
  INNER JOIN ${bookmarks} b ON ba.bookmark_id = b.id
  WHERE ba.article_id = ${articles.id}
  AND b.user_id = ${userId})`;
}
/**
 * Shared SELECT for article overview lists (all articles or per source).
 *
 * Applies the caller's base conditions plus normalized filters, keyset
 * pagination on (published_at, id), and optional ts_rank ordering when a
 * full-text search is active. Fetches `limit + 1` rows so the pagination
 * helper can detect whether another page exists.
 */
async function fetchArticleOverview(
  db: Database,
  options: {
    userId: string;
    page: PageState;
    filters: NormalizedArticleFilters;
    baseConditions?: SQL[];
  },
): Promise<ArticleOverviewResult> {
  const baseConditions = options.baseConditions ?? [];
  const { conditions: filterConditions, searchQuery } = buildArticleFilterConditions(
    options.filters,
  );
  const whereConditions = [...baseConditions, ...filterConditions];
  const bookmarkExpression = buildBookmarkExistsExpression(options.userId);
  const selectFields = {
    article_id: articles.id,
    articleTitle: articles.title,
    articleLink: articles.link,
    articleCategories: sql<string | null>`array_to_string(${articles.categories}, ',')`,
    article_excerpt: articles.excerpt,
    article_published_at: articles.publishedAt,
    article_image: articles.image,
    article_reading_time: articles.readingTime,
    sourceId: sources.id,
    source_display_name: sources.displayName,
    // Fix: bind the base URL as a parameter. Wrapping the interpolation in
    // single quotes ('${...}') turns the bound placeholder into a literal
    // string, so the generated SQL never referenced the value.
    source_image: sql<string>`(${SOURCE_IMAGE_BASE} || ${sources.name} || '.png')`,
    sourceUrl: sources.url,
    source_name: sources.name,
    source_created_at: sources.createdAt,
    article_is_bookmarked: bookmarkExpression,
  } satisfies Record<string, SQL | AnyColumn>;
  let query = db
    .select(selectFields)
    .from(articles)
    .innerJoin(sources, eq(articles.sourceId, sources.id));
  const cursor = decodeCursor(options.page.cursor);
  if (cursor?.date) {
    // Keyset predicate must mirror the ORDER BY direction chosen below.
    const comparison =
      options.filters.sortDirection === "asc"
        ? or(
            gt(articles.publishedAt, cursor.date),
            and(eq(articles.publishedAt, cursor.date), gt(articles.id, cursor.id)),
          )
        : or(
            lt(articles.publishedAt, cursor.date),
            and(eq(articles.publishedAt, cursor.date), lt(articles.id, cursor.id)),
          );
    if (comparison) {
      whereConditions.push(comparison);
    }
  }
  if (whereConditions.length === 1) {
    query = query.where(whereConditions[0]);
  } else if (whereConditions.length > 1) {
    query = query.where(and(...whereConditions));
  }
  const orderings: (SQL | AnyColumn)[] = [];
  if (searchQuery) {
    // Rank full-text matches first, using the exact tsquery used for filtering.
    orderings.push(
      options.filters.sortDirection === "asc"
        ? sql`ts_rank(${articles.tsv}, to_tsquery('french', ${searchQuery})) ASC`
        : sql`ts_rank(${articles.tsv}, to_tsquery('french', ${searchQuery})) DESC`,
    );
  }
  if (options.filters.sortDirection === "asc") {
    orderings.push(asc(articles.publishedAt), asc(articles.id));
  } else {
    orderings.push(desc(articles.publishedAt), desc(articles.id));
  }
  const rows = await query.orderBy(...orderings).limit(options.page.limit + 1);
  return buildPaginationResult(rows, options.page, {
    id: "article_id",
    date: "article_published_at",
  });
}
/**
 * Paginated overview of all articles visible to `userId`; filters are
 * normalized (trimmed, defaulted) before querying.
 */
export async function getArticleOverviewList(
  db: Database,
  params: {
    userId: string;
    page?: PageRequest;
    filters?: ArticleFilters;
  },
): Promise<ArticleOverviewResult> {
  return fetchArticleOverview(db, {
    userId: params.userId,
    page: createPageState(params.page),
    filters: normalizeArticleFilters(params.filters),
  });
}
/**
 * Paginated overview of the articles of a single source, with the same
 * normalization and pagination behavior as getArticleOverviewList.
 */
export async function getSourceArticleOverviewList(
  db: Database,
  params: {
    sourceId: string;
    userId: string;
    page?: PageRequest;
    filters?: ArticleFilters;
  },
): Promise<ArticleOverviewResult> {
  return fetchArticleOverview(db, {
    userId: params.userId,
    page: createPageState(params.page),
    filters: normalizeArticleFilters(params.filters),
    // Restrict the shared overview query to the requested source.
    baseConditions: [eq(sources.id, params.sourceId)],
  });
}
/**
 * Articles contained in one of the user's bookmark folders, with the standard
 * overview filters and keyset pagination on (published_at, id).
 *
 * Ownership is enforced in the WHERE clause (bookmark id + user id), and
 * article_is_bookmarked is constant-true since every row comes from a
 * bookmark.
 */
export async function getBookmarkedArticleList(
  db: Database,
  params: {
    userId: string;
    bookmarkId: string;
    page?: PageRequest;
    filters?: ArticleFilters;
  },
): Promise<ArticleOverviewResult> {
  const page = createPageState(params.page);
  const filters = normalizeArticleFilters(params.filters);
  const { conditions: filterConditions, searchQuery } = buildArticleFilterConditions(filters);
  const whereConditions: SQL[] = [
    eq(bookmarks.id, params.bookmarkId),
    eq(bookmarks.userId, params.userId),
    ...filterConditions,
  ];
  const selectFields = {
    article_id: articles.id,
    articleTitle: articles.title,
    articleLink: articles.link,
    articleCategories: sql<string | null>`array_to_string(${articles.categories}, ',')`,
    article_excerpt: articles.excerpt,
    article_published_at: articles.publishedAt,
    article_image: articles.image,
    article_reading_time: articles.readingTime,
    sourceId: sources.id,
    source_display_name: sources.displayName,
    // Fix: bind the base URL as a parameter. Wrapping the interpolation in
    // single quotes turned the bound placeholder into a literal string.
    source_image: sql<string>`(${SOURCE_IMAGE_BASE} || ${sources.name} || '.png')`,
    sourceUrl: sources.url,
    source_name: sources.name,
    source_created_at: sources.createdAt,
    article_is_bookmarked: sql<boolean>`true`,
  } satisfies Record<string, SQL | AnyColumn>;
  let query = db
    .select(selectFields)
    .from(bookmarkArticles)
    .innerJoin(articles, eq(bookmarkArticles.articleId, articles.id))
    .innerJoin(bookmarks, eq(bookmarkArticles.bookmarkId, bookmarks.id))
    .innerJoin(sources, eq(articles.sourceId, sources.id));
  const cursor = decodeCursor(page.cursor);
  if (cursor?.date) {
    // Keyset predicate must mirror the ORDER BY direction chosen below.
    const comparison =
      filters.sortDirection === "asc"
        ? or(
            gt(articles.publishedAt, cursor.date),
            and(eq(articles.publishedAt, cursor.date), gt(articles.id, cursor.id)),
          )
        : or(
            lt(articles.publishedAt, cursor.date),
            and(eq(articles.publishedAt, cursor.date), lt(articles.id, cursor.id)),
          );
    if (comparison) {
      whereConditions.push(comparison);
    }
  }
  if (whereConditions.length === 1) {
    query = query.where(whereConditions[0]);
  } else if (whereConditions.length > 1) {
    query = query.where(and(...whereConditions));
  }
  const orderings: (SQL | AnyColumn)[] = [];
  if (searchQuery) {
    // Rank full-text matches using the same tsquery as the filter.
    orderings.push(
      filters.sortDirection === "asc"
        ? sql`ts_rank(${articles.tsv}, to_tsquery('french', ${searchQuery})) ASC`
        : sql`ts_rank(${articles.tsv}, to_tsquery('french', ${searchQuery})) DESC`,
    );
  }
  if (filters.sortDirection === "asc") {
    orderings.push(asc(articles.publishedAt), asc(articles.id));
  } else {
    orderings.push(desc(articles.publishedAt), desc(articles.id));
  }
  const rows = await query.orderBy(...orderings).limit(page.limit + 1);
  return buildPaginationResult(rows, page, {
    id: "article_id",
    date: "article_published_at",
  });
}
/**
 * Loads the full detail row for one article (joined with its source) plus a
 * per-user bookmarked flag; returns null when the id does not exist.
 */
export async function getArticleDetails(
  db: Database,
  params: { id: string; userId: string },
): Promise<ArticleDetailsRow | null> {
  const bookmarkExpression = buildBookmarkExistsExpression(params.userId);
  const [row] = await db
    .select({
      article_id: articles.id,
      articleTitle: articles.title,
      articleLink: articles.link,
      articleCategories: sql<string | null>`array_to_string(${articles.categories}, ',')`,
      articleBody: articles.body,
      article_hash: articles.hash,
      article_published_at: articles.publishedAt,
      article_crawled_at: articles.crawledAt,
      article_updated_at: articles.updatedAt,
      article_bias: articles.bias,
      article_reliability: articles.reliability,
      article_transparency: articles.transparency,
      article_sentiment: articles.sentiment,
      article_metadata: articles.metadata,
      article_reading_time: articles.readingTime,
      sourceId: sources.id,
      source_name: sources.name,
      source_description: sources.description,
      sourceUrl: sources.url,
      source_updated_at: sources.updatedAt,
      source_display_name: sources.displayName,
      source_bias: sources.bias,
      source_reliability: sources.reliability,
      source_transparency: sources.transparency,
      // Fix: bind the base URL as a parameter. Wrapping the interpolation in
      // single quotes turned the bound placeholder into a literal string.
      source_image: sql<string>`(${SOURCE_IMAGE_BASE} || ${sources.name} || '.png')`,
      article_is_bookmarked: bookmarkExpression,
    })
    .from(articles)
    .innerJoin(sources, eq(articles.sourceId, sources.id))
    .where(eq(articles.id, params.id))
    .limit(1);
  return row ?? null;
}
/**
 * Newest-first comments for an article, keyset-paginated on (created_at, id);
 * each row carries the author's id and display name.
 */
export async function getArticleCommentList(
  db: Database,
  params: { articleId: string; page?: PageRequest },
): Promise<{ data: ArticleCommentRow[]; pagination: PaginationMeta }> {
  const page = createPageState(params.page);
  const conditions: SQL[] = [eq(comments.articleId, params.articleId)];
  const cursor = decodeCursor(page.cursor);
  if (cursor?.date) {
    // Resume strictly after the cursor row, matching the DESC ordering below.
    conditions.push(
      or(
        lt(comments.createdAt, cursor.date),
        and(eq(comments.createdAt, cursor.date), lt(comments.id, cursor.id)),
      ),
    );
  }
  let query = db
    .select({
      comment_id: comments.id,
      comment_content: comments.content,
      comment_sentiment: comments.sentiment,
      comment_created_at: comments.createdAt,
      user_id: users.id,
      user_name: users.name,
    })
    .from(comments)
    .innerJoin(users, eq(comments.userId, users.id));
  if (conditions.length === 1) {
    query = query.where(conditions[0]);
  } else if (conditions.length > 1) {
    query = query.where(and(...conditions));
  }
  // Fetch one extra row so the pagination helper can detect another page.
  const pageRows = await query
    .orderBy(desc(comments.createdAt), desc(comments.id))
    .limit(page.limit + 1);
  return buildPaginationResult(pageRows, page, {
    id: "comment_id",
    date: "comment_created_at",
  });
}
+66
View File
@@ -0,0 +1,66 @@
import type { SQL } from "drizzle-orm";
import { and, desc, eq, lt, or, sql } from "drizzle-orm";
import type { Database } from "@/client";
import { bookmarkArticles, bookmarks } from "@/schema";
import {
buildPaginationResult,
createPageState,
decodeCursor,
type PageRequest,
type PaginationMeta,
} from "@/utils/pagination";
/** One bookmark folder row with its aggregated article count. */
export interface BookmarkRow {
  bookmark_id: string;
  bookmark_name: string;
  bookmark_description: string | null;
  bookmark_created_at: string;
  bookmark_updated_at: string | null;
  bookmark_articles_count: number;
  bookmark_is_public: boolean;
}
/** Page of bookmark rows plus pagination metadata. */
export interface BookmarkListResult {
  data: BookmarkRow[];
  pagination: PaginationMeta;
}
export async function getBookmarkList(
db: Database,
params: { userId: string; page?: PageRequest },
): Promise<BookmarkListResult> {
const page = createPageState(params.page);
const whereConditions: SQL[] = [eq(bookmarks.userId, params.userId)];
const cursor = decodeCursor(page.cursor);
if (cursor?.id) {
whereConditions.push(lt(bookmarks.id, cursor.id));
}
let query = db
.select({
bookmark_id: bookmarks.id,
bookmark_name: bookmarks.name,
bookmark_description: bookmarks.description,
bookmark_created_at: bookmarks.createdAt,
bookmark_updated_at: bookmarks.updatedAt,
bookmark_articles_count: sql<number>`count(${bookmarkArticles.articleId})`,
bookmark_is_public: bookmarks.isPublic,
})
.from(bookmarks)
.leftJoin(bookmarkArticles, eq(bookmarkArticles.bookmarkId, bookmarks.id))
.groupBy(bookmarks.id);
if (whereConditions.length === 1) {
query = query.where(whereConditions[0]);
} else if (whereConditions.length > 1) {
query = query.where(and(...whereConditions));
}
const rows = await query
.orderBy(desc(bookmarks.createdAt), desc(bookmarks.id))
.limit(page.limit + 1);
return buildPaginationResult(rows, page, { id: "bookmark_id" });
}
+4
View File
@@ -0,0 +1,4 @@
// Barrel file re-exporting every query module.
export * from "./articles";
export * from "./bookmarks";
export * from "./sources";
export * from "./users";
+339
View File
@@ -0,0 +1,339 @@
import type { SQL } from "drizzle-orm";
import { and, desc, eq, lt, or, sql } from "drizzle-orm";
import type { Database } from "@/client";
import { articles, followedSources, sources } from "@/schema";
import {
buildPaginationResult,
createPageState,
decodeCursor,
type PageRequest,
type PaginationMeta,
} from "@/utils/pagination";
import { PUBLICATION_GRAPH_DAYS, SOURCE_IMAGE_BASE } from "@/constant";
/** One row of the source overview list with a per-user followed flag. */
export interface SourceOverviewRow {
  sourceId: string;
  source_display_name: string | null;
  source_image: string;
  sourceUrl: string;
  source_name: string;
  source_created_at: string;
  source_is_followed: boolean;
}
/** Page of source overview rows plus pagination metadata. */
export interface SourceOverviewResult {
  data: SourceOverviewRow[];
  pagination: PaginationMeta;
}
/** Article count for a single day of the publication graph. */
export interface PublicationEntry {
  day: string;
  count: number;
}
/** Category frequency and its percentage of all categorized articles. */
export interface CategoryShare {
  category: string;
  count: number;
  percentage: number;
}
/** Source detail payload: metadata, publication history, category breakdown. */
export interface SourceDetailsResult {
  source: {
    sourceId: string;
    source_name: string;
    source_description: string | null;
    sourceUrl: string;
    source_updated_at: string | null;
    source_display_name: string | null;
    source_bias: string;
    source_reliability: string;
    source_transparency: string;
    source_image: string;
    articles_count: number;
    source_crawled_at: string | null;
    articles_metadata_available: number;
    source_is_followed: boolean;
  };
  publicationGraph: PublicationEntry[];
  categoryShares: CategoryShare[];
}
/** Per-source crawl statistics row. */
export interface SourceStatisticsRow {
  sourceId: string;
  sourceName: string;
  sourceCrawledAt: string | null;
  articlesCount: number;
  articleMetadataAvailable: number;
}
/**
 * Per-source crawl statistics: latest crawl time, article count, and how many
 * articles carry metadata. Sources with no articles still appear (LEFT JOIN).
 */
export async function getSourceStatisticsList(db: Database): Promise<SourceStatisticsRow[]> {
  const rows = await db
    .select({
      sourceId: sources.id,
      sourceName: sources.name,
      sourceCrawledAt: sql<string | null>`max(${articles.crawledAt})`,
      articlesCount: sql<number>`count(${articles.id})`,
      articleMetadataAvailable: sql<number>`sum(CASE WHEN ${articles.metadata} IS NOT NULL THEN 1 ELSE 0 END)`,
    })
    .from(sources)
    .leftJoin(articles, eq(articles.sourceId, sources.id))
    .groupBy(sources.id, sources.name)
    // Fix: query columns have no `.asc()` method (that API belongs to index
    // definitions); a bare column in orderBy() sorts ascending, which is what
    // `sources.name.asc()` intended.
    .orderBy(sources.name);
  // Aggregates arrive as bigint strings (or null for empty joins); coerce.
  return rows.map((row) => ({
    sourceId: row.sourceId,
    sourceName: row.sourceName,
    sourceCrawledAt: row.sourceCrawledAt,
    articlesCount: Number(row.articlesCount ?? 0),
    articleMetadataAvailable: Number(row.articleMetadataAvailable ?? 0),
  }));
}
/** Source name plus an optional category restriction for boundary queries. */
export interface PublicationDateParams {
  source: string;
  category?: string | null;
}
/**
 * Returns the min or max published_at timestamp for a source (optionally
 * restricted to a category), falling back to the current time when no article
 * matches.
 */
async function selectPublicationBoundary(
  db: Database,
  fn: "min" | "max",
  params: PublicationDateParams,
): Promise<string> {
  const predicates: SQL[] = [eq(sources.name, params.source)];
  if (params.category) {
    predicates.push(sql`${params.category} = ANY(${articles.categories})`);
  }
  const boundaryExpression =
    fn === "min"
      ? sql<string | null>`min(${articles.publishedAt})`
      : sql<string | null>`max(${articles.publishedAt})`;
  const rows = await db
    .select({ boundary: boundaryExpression })
    .from(articles)
    .innerJoin(sources, eq(articles.sourceId, sources.id))
    .where(predicates.length > 1 ? and(...predicates) : predicates[0]);
  return rows[0]?.boundary ?? new Date().toISOString();
}
// Oldest published_at for the source/category; current time when none match.
export async function getEarliestPublicationDate(
  db: Database,
  params: PublicationDateParams,
): Promise<string> {
  return selectPublicationBoundary(db, "min", params);
}
// Newest published_at for the source/category; current time when none match.
export async function getLatestPublicationDate(
  db: Database,
  params: PublicationDateParams,
): Promise<string> {
  return selectPublicationBoundary(db, "max", params);
}
// Correlated EXISTS: true when `userId` follows the current source row.
// NOTE(review): unquoted `f.sourceId` is folded to `f.sourceid` by Postgres
// while `f.follower_id` is snake_case — verify the followed_sources column
// names; the analogous bookmark expression uses snake_case throughout.
function buildFollowExistsExpression(userId: string): SQL<boolean> {
  return sql`EXISTS
  (SELECT 1
  FROM ${followedSources} f
  WHERE f.sourceId = ${sources.id}
  AND f.follower_id = ${userId})`;
}
/**
 * Paginated list of all sources with a per-user followed flag, newest first,
 * keyset-paginated on (created_at, id).
 */
export async function getSourceOverviewList(
  db: Database,
  params: { userId: string; page?: PageRequest },
): Promise<SourceOverviewResult> {
  const page = createPageState(params.page);
  const followExpression = buildFollowExistsExpression(params.userId);
  let query = db
    .select({
      sourceId: sources.id,
      source_display_name: sources.displayName,
      // Fix: bind the base URL as a parameter. Wrapping the interpolation in
      // single quotes turned the bound placeholder into a literal string.
      source_image: sql<string>`(${SOURCE_IMAGE_BASE} || ${sources.name} || '.png')`,
      sourceUrl: sources.url,
      source_name: sources.name,
      source_created_at: sources.createdAt,
      source_is_followed: followExpression,
    })
    .from(sources);
  const cursor = decodeCursor(page.cursor);
  if (cursor?.date) {
    const comparison = or(
      lt(sources.createdAt, cursor.date),
      and(eq(sources.createdAt, cursor.date), lt(sources.id, cursor.id)),
    );
    if (comparison) {
      query = query.where(comparison);
    }
  }
  const rows = await query.orderBy(desc(sources.createdAt), desc(sources.id)).limit(page.limit + 1);
  return buildPaginationResult(rows, page, {
    id: "sourceId",
    date: "source_created_at",
  });
}
/**
 * Builds a unix-seconds range covering the last `days` days; the end bound is
 * pushed one day into the future so the current (partial) day is included.
 */
function createBackwardDateRange(days: number): { start: number; end: number } {
  const dayMs = 86_400_000;
  const nowMs = Date.now();
  return {
    start: Math.floor((nowMs - days * dayMs) / 1000),
    end: Math.floor((nowMs + dayMs) / 1000),
  };
}
async function fetchPublicationGraph(db: Database, sourceId: string): Promise<PublicationEntry[]> {
const range = createBackwardDateRange(PUBLICATION_GRAPH_DAYS);
const rows = await db
.select({
day: sql<string>`date
(${articles.publishedAt})`,
count: sql<number>`count
(${articles.id})`,
})
.from(articles)
.where(eq(articles.sourceId, sourceId))
.where(
sql`${articles.publishedAt} BETWEEN to_timestamp(
${range.start}
)
AND
to_timestamp
(
${range.end}
)`,
)
.groupBy(sql`date
(${articles.publishedAt})`)
.orderBy(sql`date
(${articles.publishedAt})`);
const counts = new Map<string, number>();
for (const row of rows) {
counts.set(row.day, Number(row.count ?? 0));
}
const entries: PublicationEntry[] = [];
const start = new Date(range.start * 1000);
const end = new Date(range.end * 1000);
for (let date = new Date(start.getTime()); date < end; date.setUTCDate(date.getUTCDate() + 1)) {
const day = date.toISOString().slice(0, 10);
entries.push({ day, count: counts.get(day) ?? 0 });
}
return entries;
}
/**
 * Aggregates category frequencies for a source's articles and converts them
 * into percentage shares (two decimals), sorted by count descending.
 */
async function fetchCategoryShares(db: Database, sourceId: string): Promise<CategoryShare[]> {
  const rows = await db
    .select({
      categories: sql<string | null>`array_to_string(${articles.categories}, ',')`,
    })
    .from(articles)
    .where(eq(articles.sourceId, sourceId));
  // Tally every non-empty category across all rows.
  const tally = new Map<string, number>();
  for (const row of rows) {
    if (!row.categories) continue;
    for (const piece of row.categories.split(",")) {
      const name = piece.trim();
      if (!name) continue;
      tally.set(name, (tally.get(name) ?? 0) + 1);
    }
  }
  let total = 0;
  for (const value of tally.values()) {
    total += value;
  }
  return [...tally.entries()]
    .map(([category, count]) => ({
      category,
      count,
      // Round to two decimal places; 0 when there are no categorized rows.
      percentage: total > 0 ? Math.round((count / total) * 10000) / 100 : 0,
    }))
    .sort((a, b) => b.count - a.count);
}
/**
 * Full source detail payload: metadata plus aggregate article statistics, the
 * zero-filled publication graph, and the category share breakdown. Returns
 * null when the source id does not exist.
 */
export async function getSourceDetails(
  db: Database,
  params: { sourceId: string; userId: string },
): Promise<SourceDetailsResult | null> {
  const followExpression = buildFollowExistsExpression(params.userId);
  const [row] = await db
    .select({
      sourceId: sources.id,
      source_name: sources.name,
      source_description: sources.description,
      sourceUrl: sources.url,
      source_updated_at: sources.updatedAt,
      source_display_name: sources.displayName,
      source_bias: sources.bias,
      source_reliability: sources.reliability,
      source_transparency: sources.transparency,
      // Fix: bind the base URL as a parameter. Wrapping the interpolation in
      // single quotes turned the bound placeholder into a literal string.
      source_image: sql<string>`(${SOURCE_IMAGE_BASE} || ${sources.name} || '.png')`,
      articles_count: sql<number>`count(${articles.id})`,
      source_crawled_at: sql<string | null>`max(${articles.crawledAt})`,
      articles_metadata_available: sql<number>`count(*) FILTER (WHERE ${articles.metadata} IS NOT NULL)`,
      source_is_followed: followExpression,
    })
    .from(sources)
    .leftJoin(articles, eq(articles.sourceId, sources.id))
    .where(eq(sources.id, params.sourceId))
    .groupBy(
      sources.id,
      sources.name,
      sources.description,
      sources.url,
      sources.updatedAt,
      sources.displayName,
      sources.bias,
      sources.reliability,
      sources.transparency,
    )
    .limit(1);
  if (!row) {
    return null;
  }
  // The graph and the category shares are independent queries; run both
  // concurrently.
  const [publicationGraph, categoryShares] = await Promise.all([
    fetchPublicationGraph(db, params.sourceId),
    fetchCategoryShares(db, params.sourceId),
  ]);
  return {
    source: {
      ...row,
      // Aggregates arrive as bigint strings (or null for empty joins).
      articles_count: Number(row.articles_count ?? 0),
      articles_metadata_available: Number(row.articles_metadata_available ?? 0),
    },
    publicationGraph,
    categoryShares,
  };
}
+31
View File
@@ -0,0 +1,31 @@
import { eq } from "drizzle-orm";
import type { Database } from "@/client";
import { users } from "@/schema";
/** Public profile fields of a user (no credentials or flags). */
export interface UserProfileRow {
  user_id: string;
  user_name: string;
  user_email: string;
  user_created_at: string;
  user_updated_at: string | null;
}
/**
 * Fetches one user's profile fields by id; null when the user does not exist.
 */
export async function getUserProfile(
  db: Database,
  params: { userId: string },
): Promise<UserProfileRow | null> {
  const rows = await db
    .select({
      user_id: users.id,
      user_name: users.name,
      user_email: users.email,
      user_created_at: users.createdAt,
      user_updated_at: users.updatedAt,
    })
    .from(users)
    .where(eq(users.id, params.userId))
    .limit(1);
  return rows.at(0) ?? null;
}
+454
View File
@@ -0,0 +1,454 @@
import { relations, sql } from "drizzle-orm";
import {
  boolean,
  check,
  customType,
  doublePrecision,
  foreignKey,
  index,
  inet,
  integer,
  jsonb,
  pgEnum,
  pgTable,
  primaryKey,
  text,
  timestamp,
  unique,
  uniqueIndex,
  uuid,
  varchar,
} from "drizzle-orm/pg-core";
// Custom column type mapping Postgres `tsvector` (full-text search vectors);
// drizzle ships no built-in tsvector type.
export const tsvector = customType<{
  data: string;
}>({
  dataType() {
    return "tsvector";
  },
});
/** Optional precision/scale for a Postgres `numeric` column. */
type NumericConfig = {
  precision?: number;
  scale?: number;
};
/**
 * `numeric` column that converts the driver's string values to JS numbers.
 *
 * Fix: the previous truthiness check (`config?.precision && config?.scale`)
 * silently dropped both arguments when scale was 0 (e.g. numeric(10, 0));
 * compare against undefined instead, and support precision-only declarations.
 *
 * Note: Number.parseFloat loses precision beyond 2^53 — acceptable as long as
 * values fit in a double.
 */
export const numericCasted = customType<{
  data: number;
  driverData: string;
  config: NumericConfig;
}>({
  dataType: (config) => {
    if (config?.precision !== undefined) {
      return config.scale !== undefined
        ? `numeric(${config.precision}, ${config.scale})`
        : `numeric(${config.precision})`;
    }
    return "numeric";
  },
  fromDriver: (value: string) => Number.parseFloat(value),
  toDriver: (value: number) => value.toString(),
});
/** Sentiment classification shared by articles and comments. */
export const articleSentimentEnum = pgEnum("article_sentiment", [
  "positive",
  "neutral",
  "negative",
]);
/** Editorial bias rating shared by sources and articles. */
export const biasEnum = pgEnum("bias", ["neutral", "slightly", "partisan", "extreme"]);
/** Reliability rating shared by sources and articles. */
export const reliabilityEnum = pgEnum("reliability", [
  "trusted",
  "reliable",
  "average",
  "low_trust",
  "unreliable",
]);
/** Transparency rating shared by sources and articles. */
export const transparencyEnum = pgEnum("transparency", ["high", "medium", "low"]);
/** Purposes for one-time verification tokens. */
export const verificationTokenPurposeEnum = pgEnum("verification_token_purpose", [
  "confirm_account",
  "password_reset",
  "unlock_account",
  "delete_account",
]);
// News source registry. Uniqueness on name and url is case-insensitive via
// functional lower() indexes.
export const sources = pgTable(
  "source",
  {
    id: uuid("id").notNull().defaultRandom().primaryKey(),
    url: varchar("url", { length: 255 }).notNull(),
    name: varchar("name", { length: 255 }).notNull(),
    displayName: varchar("display_name", { length: 255 }),
    description: varchar("description", { length: 1024 }),
    createdAt: timestamp("created_at", { mode: "string" }).defaultNow().notNull(),
    updatedAt: timestamp("updated_at", { mode: "string" }),
    bias: biasEnum("bias").notNull().default("neutral"),
    reliability: reliabilityEnum("reliability").notNull().default("reliable"),
    transparency: transparencyEnum("transparency").notNull().default("medium"),
  },
  (table) => [
    uniqueIndex("unq_source_name").using(
      "btree",
      sql`lower
      (${table.name})`,
    ),
    // NOTE(review): "unq_sourceUrl" breaks the snake_case naming used by the
    // other indexes; renaming would require a migration.
    uniqueIndex("unq_sourceUrl").using(
      "btree",
      sql`lower
      (${table.url})`,
    ),
  ],
);
export const articles = pgTable(
"article",
{
id: uuid("id").notNull().defaultRandom().primaryKey(),
sourceId: uuid("sourceId").notNull(),
title: varchar("title", { length: 1024 }).notNull(),
body: text("body").notNull(),
hash: varchar("hash", { length: 32 }).notNull(),
categories: text("categories").array(),
sentiment: articleSentimentEnum("sentiment").notNull().default("neutral"),
metadata: jsonb("metadata"),
tokenStatistics: jsonb("token_statistics"),
image: varchar("image", { length: 1024 }).generatedAlwaysAs(() => sql`(metadata->>'image')`),
excerpt: varchar("excerpt", { length: 255 }).generatedAlwaysAs(
() => sql`((left(body, 200) || '...'))`,
),
publishedAt: timestamp("published_at", { mode: "string" }).notNull(),
crawledAt: timestamp("crawled_at", { mode: "string" }).notNull(),
updatedAt: timestamp("updated_at", { mode: "string" }),
link: varchar("link", { length: 1024 }).notNull(),
bias: biasEnum("bias").notNull().default("neutral"),
reliability: reliabilityEnum("reliability").notNull().default("reliable"),
transparency: transparencyEnum("transparency").notNull().default("medium"),
readingTime: integer("reading_time").default(1),
tsv: tsvector("tsv").generatedAlwaysAs(
() => sql`(
setweight(to_tsvector('french', coalesce(title, '')), 'A')
|| setweight(to_tsvector('french', coalesce(body, '')), 'B')
)`,
),
},
(table) => [
index("article_sourceId_idx").on(table.sourceId),
index("idx_article_published_at").using("btree", table.publishedAt.desc()),
index("idx_article_published_id").using("btree", table.publishedAt.desc(), table.id.desc()),
unique("unq_article_hash").on(table.hash),
index("gin_article_tsv").using("gin", table.tsv),
index("gin_articleLink_trgm").using("gin", table.link.op("gin_trgm_ops")),
index("gin_articleTitle_trgm").using("gin", table.title.op("gin_trgm_ops")),
index("gin_articleCategories").using("gin", table.categories),
foreignKey({
columns: [table.sourceId],
foreignColumns: [sources.id],
name: "article_sourceId_fkey",
}).onDelete("cascade"),
{
kind: "check",
expression: sql`reading_time >= 0`,
name: "chk_article_reading_time",
},
{
kind: "check",
expression: sql`(metadata IS NULL OR jsonb_typeof(metadata) IN ('object','array'))`,
name: "chk_article_metadata_json",
},
],
);
export const users = pgTable(
"user",
{
id: uuid("id").notNull().defaultRandom().primaryKey(),
name: varchar("name", { length: 255 }).notNull(),
email: varchar("email", { length: 255 }).notNull(),
password: varchar("password", { length: 512 }).notNull(),
isLocked: boolean("is_locked").notNull().default(false),
isConfirmed: boolean("is_confirmed").notNull().default(false),
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
updatedAt: timestamp("updated_at", { mode: "string" }),
roles: jsonb("roles").notNull(),
},
(table) => [
uniqueIndex("unq_user_email").using("btree", sql`lower (${table.email})`),
{
kind: "check",
name: "chk_user_roles_array",
expression: sql`jsonb_typeof(roles) = 'array'`,
},
],
);
// Named bookmark collections owned by a user; rows cascade-delete with the user.
export const bookmarks = pgTable(
  "bookmark",
  {
    id: uuid("id").notNull().defaultRandom().primaryKey(),
    userId: uuid("user_id").notNull(),
    name: varchar("name", { length: 255 }).notNull(),
    description: varchar("description", { length: 512 }),
    isPublic: boolean("is_public").notNull().default(false),
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
    updatedAt: timestamp("updated_at", { mode: "string" }),
  },
  (table) => [
    index("bookmark_user_id_idx").on(table.userId),
    // Serves "bookmarks of a user, newest first" listings.
    index("idx_bookmark_user_created").using("btree", table.userId, table.createdAt.desc()),
    foreignKey({
      columns: [table.userId],
      foreignColumns: [users.id],
      name: "bookmark_user_id_fkey",
    }).onDelete("cascade"),
  ],
);
// Join table linking bookmarks to articles (many-to-many). The composite
// primary key prevents duplicate links; rows cascade-delete with either side.
export const bookmarkArticles = pgTable(
  "bookmark_article",
  {
    bookmarkId: uuid("bookmark_id").notNull(),
    articleId: uuid("article_id").notNull(),
  },
  (table) => [
    primaryKey({
      columns: [table.bookmarkId, table.articleId],
      name: "bookmark_article_pkey",
    }),
    index("bookmark_article_bookmark_idx").on(table.bookmarkId),
    index("bookmark_article_article_idx").on(table.articleId),
    foreignKey({
      columns: [table.bookmarkId],
      foreignColumns: [bookmarks.id],
      name: "bookmark_article_bookmark_id_fkey",
    }).onDelete("cascade"),
    foreignKey({
      columns: [table.articleId],
      foreignColumns: [articles.id],
      name: "bookmark_article_article_id_fkey",
    }).onDelete("cascade"),
  ],
);
// User comments on articles. `sentiment` defaults to "neutral" and `is_spam`
// to false — presumably set later by a classifier; verify against callers.
export const comments = pgTable(
  "comment",
  {
    id: uuid("id").notNull().defaultRandom().primaryKey(),
    userId: uuid("user_id").notNull(),
    articleId: uuid("article_id").notNull(),
    content: varchar("content", { length: 512 }).notNull(),
    sentiment: articleSentimentEnum("sentiment").notNull().default("neutral"),
    isSpam: boolean("is_spam").notNull().default(false),
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
  },
  (table) => [
    index("comment_user_id_idx").on(table.userId),
    index("comment_article_id_idx").on(table.articleId),
    // Serves "comments on an article, newest first" listings.
    index("idx_comment_article_created").using("btree", table.articleId, table.createdAt.desc()),
    foreignKey({
      columns: [table.userId],
      foreignColumns: [users.id],
      name: "comment_user_id_fkey",
    }).onDelete("cascade"),
    foreignKey({
      columns: [table.articleId],
      foreignColumns: [articles.id],
      name: "comment_article_id_fkey",
    }).onDelete("cascade"),
  ],
);
// Tracks which users follow which sources; rows cascade-delete with either side.
export const followedSources = pgTable(
  "followed_source",
  {
    id: uuid("id").notNull().defaultRandom().primaryKey(),
    followerId: uuid("follower_id").notNull(),
    // NOTE(review): DB column is camelCase "sourceId" (not source_id), breaking
    // the snake_case convention used elsewhere — confirm intentional before renaming.
    sourceId: uuid("sourceId").notNull(),
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
  },
  (table) => [
    index("followed_source_follower_idx").on(table.followerId),
    index("followed_source_sourceIdx").on(table.sourceId),
    // Serves "sources a user followed, newest first" listings.
    index("idx_followed_source_follower_created").using(
      "btree",
      table.followerId,
      table.createdAt.desc(),
    ),
    foreignKey({
      columns: [table.followerId],
      foreignColumns: [users.id],
      name: "followed_source_follower_id_fkey",
    }).onDelete("cascade"),
    foreignKey({
      columns: [table.sourceId],
      foreignColumns: [sources.id],
      name: "followed_source_sourceId_fkey",
    }).onDelete("cascade"),
  ],
);
// Per-user login attempt records, indexed by user and by creation time
// (descending) — presumably consulted for lockout/rate limiting; verify callers.
export const loginAttempts = pgTable(
  "login_attempt",
  {
    id: uuid("id").notNull().defaultRandom().primaryKey(),
    userId: uuid("user_id").notNull(),
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
  },
  (table) => [
    index("login_attempt_user_id_idx").on(table.userId),
    index("idx_login_attempt_created_at").using("btree", table.createdAt.desc()),
    foreignKey({
      columns: [table.userId],
      foreignColumns: [users.id],
      name: "login_attempt_user_id_fkey",
    }).onDelete("cascade"),
  ],
);
// Login history per user: client IP plus device_* and location_* detail
// columns — presumably filled from a user-agent parser and IP geo lookup;
// verify against the code that writes these rows.
export const loginHistories = pgTable(
  "login_history",
  {
    id: uuid("id").notNull().defaultRandom().primaryKey(),
    userId: uuid("user_id").notNull(),
    ipAddress: inet("ip_address"),
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
    deviceOperatingSystem: varchar("device_operating_system", { length: 255 }),
    deviceClient: varchar("device_client", { length: 255 }),
    deviceDevice: varchar("device_device", { length: 255 }),
    deviceIsBot: boolean("device_is_bot").notNull().default(false),
    locationTimeZone: varchar("location_time_zone", { length: 255 }),
    locationLongitude: doublePrecision("location_longitude"),
    locationLatitude: doublePrecision("location_latitude"),
    locationAccuracyRadius: integer("location_accuracy_radius"),
  },
  (table) => [
    index("login_history_user_id_idx").on(table.userId),
    // Serves "history of a user, newest first" listings.
    index("idx_login_history_created_at").using("btree", table.userId, table.createdAt.desc()),
    index("login_history_ip_address_idx").on(table.ipAddress),
    foreignKey({
      columns: [table.userId],
      foreignColumns: [users.id],
      name: "login_history_user_id_fkey",
    }).onDelete("cascade"),
  ],
);
// Refresh tokens keyed by username (no FK to user.id).
// NOTE(review): table name is plural ("refresh_tokens") while every other
// table is singular, and `validUntil` maps to a column named just "valid" —
// confirm both are intentional before renaming.
export const refreshTokens = pgTable(
  "refresh_tokens",
  {
    // Identity column backed by sequence refresh_tokens_id_seq.
    id: integer("id").generatedAlwaysAsIdentity({ name: "refresh_tokens_id_seq" }).primaryKey(),
    refreshToken: varchar("refresh_token", { length: 128 }).notNull(),
    username: varchar("username", { length: 255 }).notNull(),
    validUntil: timestamp("valid", { mode: "string" }).notNull(),
  },
  (table) => [unique("uniq_refresh_token_token").on(table.refreshToken)],
);
// Per-user verification tokens typed by verificationTokenPurposeEnum;
// `token` is nullable.
export const verificationTokens = pgTable(
  "verification_token",
  {
    id: uuid("id").notNull().defaultRandom().primaryKey(),
    userId: uuid("user_id").notNull(),
    purpose: verificationTokenPurposeEnum("purpose").notNull(),
    token: varchar("token", { length: 60 }),
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
  },
  (table) => [
    index("verification_token_user_id_idx").on(table.userId),
    index("idx_verification_token_created_at").using("btree", table.createdAt.desc()),
    // Partial unique index: at most one row with a non-NULL token per
    // (user, purpose) pair.
    uniqueIndex("unq_verification_token_user_purpose")
      .on(table.userId, table.purpose)
      .where(sql`token IS NOT NULL`),
    foreignKey({
      columns: [table.userId],
      foreignColumns: [users.id],
      name: "verification_token_user_id_fkey",
    }).onDelete("cascade"),
  ],
);
// Relations
// A source publishes many articles and can be followed by many users.
export const sourcesRelations = relations(sources, ({ many: hasMany }) => ({
  articles: hasMany(articles),
  followers: hasMany(followedSources),
}));
// An article belongs to one source and has many bookmark links and comments.
export const articlesRelations = relations(articles, ({ one: belongsTo, many: hasMany }) => ({
  source: belongsTo(sources, { fields: [articles.sourceId], references: [sources.id] }),
  bookmarkLinks: hasMany(bookmarkArticles),
  comments: hasMany(comments),
}));
// One-to-many navigations hanging off a user account.
export const appUsersRelations = relations(users, ({ many: hasMany }) => ({
  bookmarks: hasMany(bookmarks),
  comments: hasMany(comments),
  loginAttempts: hasMany(loginAttempts),
  loginHistories: hasMany(loginHistories),
  verificationTokens: hasMany(verificationTokens),
  followedSources: hasMany(followedSources),
}));
// A bookmark belongs to one user and links to many articles via the join table.
export const bookmarksRelations = relations(bookmarks, ({ one: belongsTo, many: hasMany }) => ({
  user: belongsTo(users, { fields: [bookmarks.userId], references: [users.id] }),
  articles: hasMany(bookmarkArticles),
}));
// Join-row navigation: each bookmark_article row points at one bookmark and one article.
export const bookmarkArticlesRelations = relations(bookmarkArticles, ({ one: belongsTo }) => ({
  bookmark: belongsTo(bookmarks, {
    fields: [bookmarkArticles.bookmarkId],
    references: [bookmarks.id],
  }),
  article: belongsTo(articles, {
    fields: [bookmarkArticles.articleId],
    references: [articles.id],
  }),
}));
// A comment belongs to one article and one authoring user.
export const commentsRelations = relations(comments, ({ one: belongsTo }) => ({
  article: belongsTo(articles, { fields: [comments.articleId], references: [articles.id] }),
  user: belongsTo(users, { fields: [comments.userId], references: [users.id] }),
}));
// A follow row links one follower (user) to one source.
export const followedSourcesRelations = relations(followedSources, ({ one: belongsTo }) => ({
  follower: belongsTo(users, { fields: [followedSources.followerId], references: [users.id] }),
  source: belongsTo(sources, { fields: [followedSources.sourceId], references: [sources.id] }),
}));
// Each login attempt belongs to exactly one user.
export const loginAttemptsRelations = relations(loginAttempts, ({ one: belongsTo }) => ({
  user: belongsTo(users, { fields: [loginAttempts.userId], references: [users.id] }),
}));
// Each login-history row belongs to exactly one user.
export const loginHistoriesRelations = relations(loginHistories, ({ one: belongsTo }) => ({
  user: belongsTo(users, { fields: [loginHistories.userId], references: [users.id] }),
}));
// Each verification token belongs to exactly one user.
export const verificationTokensRelations = relations(verificationTokens, ({ one: belongsTo }) => ({
  user: belongsTo(users, { fields: [verificationTokens.userId], references: [users.id] }),
}));
+20
View File
@@ -0,0 +1,20 @@
import { randomBytes } from "node:crypto";
/**
 * Generates a new API key of the form `basango_` followed by 64 lowercase hex
 * characters (32 random bytes, hex-encoded) — 72 characters in total.
 * (The old doc comment claimed a `mid_` prefix; the actual prefix is `basango_`.)
 * @returns A freshly generated API key string
 */
export function generateApiKey(): string {
  const entropy = randomBytes(32); // 32 bytes → 64 hex chars
  return `basango_${entropy.toString("hex")}`;
}
/**
 * Validates whether a string has the exact shape produced by generateApiKey:
 * the literal prefix `basango_` followed by 64 lowercase hex characters
 * (32 random bytes hex-encoded), i.e. 72 characters in total.
 *
 * Bug fix: the previous check required length === 68, but `basango_` (8) plus
 * 64 hex chars is 72 — so every key generateApiKey produced was rejected. The
 * regex also pins the payload to hex, matching how keys are generated.
 *
 * @param key The key to validate
 * @returns True if the key matches the generated API-key format
 */
export function isValidApiKeyFormat(key: string): boolean {
  return /^basango_[0-9a-f]{64}$/.test(key);
}
+6
View File
@@ -0,0 +1,6 @@
import { sql } from "drizzle-orm";
import { db } from "@/client";
// Liveness probe: executes `SELECT 1` through the shared db client; resolves
// on success and rejects (propagating the driver error) if the DB is unreachable.
export async function checkHealth() {
  await db.execute(sql`SELECT 1`);
}
+4
View File
@@ -0,0 +1,4 @@
// Barrel file: re-exports the db utility modules under a single entry point.
export * from "./api-keys";
export * from "./health";
export * from "./pagination";
export * from "./search-query";
+119
View File
@@ -0,0 +1,119 @@
import { Buffer } from "node:buffer";
// Sort direction literal union.
export type SortDirection = "asc" | "desc";
// Raw pagination input as received from callers (e.g. query-string values);
// all fields are optional and unvalidated.
export interface PageRequest {
  page?: number;
  limit?: number;
  cursor?: string | null;
}
// Normalized pagination state produced by createPageState: page and limit are
// positive integers and offset is precomputed as (page - 1) * limit.
export interface PageState {
  page: number;
  limit: number;
  cursor: string | null;
  offset: number;
}
// Decoded keyset-cursor contents: id of the last row seen, plus an optional
// date component when the keyset includes a date column.
export interface CursorPayload {
  id: string;
  date?: string | null;
}
// Pagination block returned alongside page data by buildPaginationResult.
export interface PaginationMeta {
  current: number;
  limit: number;
  // Base64 cursor for continuing past this page; null when the page is empty.
  cursor: string | null;
  hasNext: boolean;
}
const DEFAULT_PAGE = 1;
const DEFAULT_LIMIT = 5;
const MAX_LIMIT = 100;

/**
 * Normalizes a raw page request into a concrete pagination state.
 *
 * Non-finite or non-positive page/limit values fall back to their defaults;
 * fractional values are truncated; the limit is clamped to the inclusive
 * range [DEFAULT_LIMIT, MAX_LIMIT]. The offset is derived for OFFSET-based
 * queries as (page - 1) * limit.
 */
export function createPageState(request: PageRequest = {}): PageState {
  const rawPage = request.page;
  const page =
    Number.isFinite(rawPage) && (rawPage ?? 0) > 0 ? Math.trunc(rawPage!) : DEFAULT_PAGE;

  const rawLimit = request.limit;
  const requested =
    Number.isFinite(rawLimit) && (rawLimit ?? 0) > 0 ? Math.trunc(rawLimit!) : DEFAULT_LIMIT;
  const limit = Math.min(MAX_LIMIT, Math.max(DEFAULT_LIMIT, requested));

  return {
    page,
    limit,
    cursor: request.cursor ?? null,
    offset: (page - 1) * limit,
  };
}
/**
 * Encodes a keyset cursor for a result row as base64(JSON).
 *
 * The payload always carries the row's id (stringified, "" if absent); when
 * the keyset names a date column and the row holds a non-nullish value for it,
 * that value is included as `date`.
 */
export function encodeCursor(
  row: Record<string, unknown>,
  keyset: { id: string; date?: string | null },
): string {
  const payload: { id: string; date?: string } = { id: String(row[keyset.id] ?? "") };
  if (keyset.date) {
    const dateValue = row[keyset.date];
    if (dateValue != null) {
      payload.date = String(dateValue);
    }
  }
  const json = JSON.stringify(payload);
  return Buffer.from(json, "utf8").toString("base64");
}
/**
 * Decodes a base64(JSON) keyset cursor produced by encodeCursor.
 *
 * Returns null for missing input, undecodable/unparseable strings, or any
 * payload lacking a non-empty string `id` — callers can treat null as
 * "start from the beginning".
 */
export function decodeCursor(cursor?: string | null): CursorPayload | null {
  if (!cursor) {
    return null;
  }
  try {
    const json = Buffer.from(cursor, "base64").toString("utf8");
    const candidate = JSON.parse(json) as CursorPayload;
    const hasValidId = Boolean(candidate) && typeof candidate.id === "string" && candidate.id.length > 0;
    return hasValidId ? candidate : null;
  } catch {
    return null;
  }
}
/**
 * Shapes a fetch-one-extra-row result set into page data plus pagination meta.
 *
 * Callers are expected to fetch limit + 1 rows: a row count above page.limit
 * signals another page exists, and the surplus row is trimmed off. The cursor
 * encodes the keyset of the last returned row (null for an empty page).
 */
export function buildPaginationResult<T extends Record<string, unknown>>(
  rows: T[],
  page: PageState,
  keyset: { id: string; date?: string | null },
): { data: T[]; pagination: PaginationMeta } {
  const hasNext = rows.length > page.limit;
  // Keep the original array reference when no trimming is needed.
  const data = hasNext ? rows.slice(0, page.limit) : rows;
  const lastRow = data.length > 0 ? data[data.length - 1] : null;
  return {
    data,
    pagination: {
      current: page.page,
      limit: page.limit,
      cursor: lastRow ? encodeCursor(lastRow, keyset) : null,
      hasNext,
    },
  };
}
+16
View File
@@ -0,0 +1,16 @@
/**
 * Builds a PostgreSQL tsquery string from free-form user input.
 *
 * Each whitespace-separated term is lower-cased, has tsquery operator
 * characters (& | ! ( ) : * ' " + - ~) backslash-escaped, and is given a
 * `:*` prefix-match suffix; the terms are then AND-ed with " & ".
 * Blank/whitespace-only input yields "".
 */
export const buildSearchQuery = (input: string): string => {
  const terms = input.trim().split(/\s+/).filter(Boolean);
  return terms
    .map((term) => `${term.toLowerCase().replace(/[&|!():*'"+~-]/g, "\\$&")}:*`)
    .join(" & ");
};