feat(api): setting up

This commit is contained in:
2025-11-09 16:28:36 +02:00
parent d72f3871a4
commit 4b82a11207
35 changed files with 2280 additions and 1516 deletions
+1
View File
@@ -0,0 +1 @@
BASANGO_DATABASE_URL="postgresql://postgres:postgres@localhost:5432/app?serverVersion=16&charset=utf8"
+7 -4
View File
@@ -1,10 +1,13 @@
import type { Config } from "drizzle-kit";
import { createEnvAccessor } from "@devscast/config";
import { defineConfig } from "drizzle-kit";
export default {
const env = createEnvAccessor(["BASANGO_DATABASE_URL"] as const);
export default defineConfig({
dbCredentials: {
url: process.env.DATABASE_URL!,
url: env("BASANGO_DATABASE_URL"),
},
dialect: "postgresql",
out: "./migrations",
schema: "./src/schema.ts",
} satisfies Config;
});
@@ -0,0 +1,172 @@
-- Current sql file was generated after introspecting the database
-- If you want to run this migration please uncomment this code before executing migrations
/*
-- NOTE(review): the sequence below is created, but the refresh_tokens table
-- further down declares "id" integer with no DEFAULT nextval(...) attached —
-- verify the introspection captured the column default before uncommenting.
CREATE SEQUENCE "public"."refresh_tokens_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1;--> statement-breakpoint
-- Migration bookkeeping table left over from the previous Doctrine stack.
CREATE TABLE "doctrine_migration_versions" (
"version" varchar(191) PRIMARY KEY NOT NULL,
"executed_at" timestamp(0) DEFAULT NULL,
"execution_time" integer
);
--> statement-breakpoint
CREATE TABLE "bookmark" (
"id" uuid PRIMARY KEY NOT NULL,
"user_id" uuid NOT NULL,
"name" varchar(255) NOT NULL,
"description" varchar(512) DEFAULT NULL,
"is_public" boolean DEFAULT false NOT NULL,
"created_at" timestamp(0) NOT NULL,
"updated_at" timestamp(0) DEFAULT NULL
);
--> statement-breakpoint
CREATE TABLE "login_attempt" (
"id" uuid PRIMARY KEY NOT NULL,
"user_id" uuid NOT NULL,
"created_at" timestamp(0) NOT NULL
);
--> statement-breakpoint
CREATE TABLE "login_history" (
"id" uuid PRIMARY KEY NOT NULL,
"user_id" uuid NOT NULL,
"ip_address" "inet",
"created_at" timestamp(0) NOT NULL,
"device_operating_system" varchar(255) DEFAULT NULL,
"device_client" varchar(255) DEFAULT NULL,
"device_device" varchar(255) DEFAULT NULL,
"device_is_bot" boolean DEFAULT false NOT NULL,
"location_time_zone" varchar(255) DEFAULT NULL,
"location_longitude" double precision,
"location_latitude" double precision,
"location_accuracy_radius" integer
);
--> statement-breakpoint
CREATE TABLE "verification_token" (
"id" uuid PRIMARY KEY NOT NULL,
"user_id" uuid NOT NULL,
"purpose" varchar(255) NOT NULL,
"created_at" timestamp(0) NOT NULL,
"token" varchar(60) DEFAULT NULL
);
--> statement-breakpoint
CREATE TABLE "followed_source" (
"id" uuid PRIMARY KEY NOT NULL,
"follower_id" uuid NOT NULL,
"source_id" uuid NOT NULL,
"created_at" timestamp(0) NOT NULL
);
--> statement-breakpoint
CREATE TABLE "comment" (
"id" uuid PRIMARY KEY NOT NULL,
"user_id" uuid NOT NULL,
"article_id" uuid NOT NULL,
"content" varchar(512) NOT NULL,
"sentiment" varchar(30) DEFAULT 'neutral' NOT NULL,
"is_spam" boolean DEFAULT false NOT NULL,
"created_at" timestamp(0) NOT NULL
);
--> statement-breakpoint
CREATE TABLE "refresh_tokens" (
"id" integer PRIMARY KEY NOT NULL,
"refresh_token" varchar(128) NOT NULL,
"username" varchar(255) NOT NULL,
"valid" timestamp(0) NOT NULL
);
--> statement-breakpoint
CREATE TABLE "article" (
"id" uuid PRIMARY KEY NOT NULL,
"source_id" uuid NOT NULL,
"title" varchar(1024) NOT NULL,
"body" text NOT NULL,
"hash" varchar(32) NOT NULL,
"categories" text[],
"sentiment" varchar(30) DEFAULT 'neutral' NOT NULL,
"metadata" jsonb,
"image" varchar(1024) GENERATED ALWAYS AS ((metadata ->> 'image'::text)) STORED,
"excerpt" varchar(255) GENERATED ALWAYS AS (("left"(body, 200) || '...'::text)) STORED,
"published_at" timestamp(0) NOT NULL,
"crawled_at" timestamp(0) NOT NULL,
"updated_at" timestamp(0) DEFAULT NULL,
"link" varchar(1024) NOT NULL,
"bias" varchar(30) DEFAULT 'neutral' NOT NULL,
"reliability" varchar(30) DEFAULT 'reliable' NOT NULL,
"transparency" varchar(30) DEFAULT 'medium' NOT NULL,
"reading_time" integer DEFAULT 1,
"tsv" "tsvector" GENERATED ALWAYS AS ((setweight(to_tsvector('french'::regconfig, (COALESCE(title, ''::character varying))::text), 'A'::"char") || setweight(to_tsvector('french'::regconfig, COALESCE(body, ''::text)), 'B'::"char"))) STORED,
"token_statistics" jsonb,
CONSTRAINT "chk_article_reading_time" CHECK (reading_time >= 0),
CONSTRAINT "chk_article_sentiment" CHECK ((sentiment)::text = ANY ((ARRAY['positive'::character varying, 'neutral'::character varying, 'negative'::character varying])::text[])),
CONSTRAINT "chk_article_metadata_json" CHECK ((metadata IS NULL) OR (jsonb_typeof(metadata) = ANY (ARRAY['object'::text, 'array'::text])))
);
--> statement-breakpoint
CREATE TABLE "user" (
"id" uuid PRIMARY KEY NOT NULL,
"name" varchar(255) NOT NULL,
"email" varchar(255) NOT NULL,
"password" varchar(512) NOT NULL,
"is_locked" boolean DEFAULT false NOT NULL,
"is_confirmed" boolean DEFAULT false NOT NULL,
"created_at" timestamp(0) NOT NULL,
"updated_at" timestamp(0) DEFAULT NULL,
"roles" jsonb NOT NULL,
CONSTRAINT "chk_user_roles_json" CHECK (jsonb_typeof(roles) = 'array'::text)
);
--> statement-breakpoint
CREATE TABLE "source" (
"id" uuid PRIMARY KEY NOT NULL,
"url" varchar(255) NOT NULL,
"name" varchar(255) NOT NULL,
"display_name" varchar(255) DEFAULT NULL,
"description" varchar(1024) DEFAULT NULL,
"updated_at" timestamp(0) DEFAULT NULL,
"bias" varchar(30) DEFAULT 'neutral' NOT NULL,
"reliability" varchar(30) DEFAULT 'reliable' NOT NULL,
"transparency" varchar(30) DEFAULT 'medium' NOT NULL
);
--> statement-breakpoint
CREATE TABLE "bookmark_article" (
"bookmark_id" uuid NOT NULL,
"article_id" uuid NOT NULL,
CONSTRAINT "bookmark_article_pkey" PRIMARY KEY("bookmark_id","article_id")
);
--> statement-breakpoint
-- Foreign keys; constraint names are the original Doctrine-generated hashes.
ALTER TABLE "bookmark" ADD CONSTRAINT "fk_da62921da76ed395" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "login_attempt" ADD CONSTRAINT "fk_8c11c1ba76ed395" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "login_history" ADD CONSTRAINT "fk_37976e36a76ed395" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "verification_token" ADD CONSTRAINT "fk_c1cc006ba76ed395" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "followed_source" ADD CONSTRAINT "fk_7a763a3eac24f853" FOREIGN KEY ("follower_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "followed_source" ADD CONSTRAINT "fk_7a763a3e953c1c61" FOREIGN KEY ("source_id") REFERENCES "public"."source"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "comment" ADD CONSTRAINT "fk_9474526ca76ed395" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "comment" ADD CONSTRAINT "fk_9474526c7294869c" FOREIGN KEY ("article_id") REFERENCES "public"."article"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "article" ADD CONSTRAINT "fk_23a0e66953c1c61" FOREIGN KEY ("source_id") REFERENCES "public"."source"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "bookmark_article" ADD CONSTRAINT "fk_6fe2655d92741d25" FOREIGN KEY ("bookmark_id") REFERENCES "public"."bookmark"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "bookmark_article" ADD CONSTRAINT "fk_6fe2655d7294869c" FOREIGN KEY ("article_id") REFERENCES "public"."article"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
-- NOTE(review): several operator classes below look mismatched for their
-- column types (e.g. timestamp_ops on uuid columns in idx_bookmark_user_created,
-- swapped ops in idx_followed_source_follower_created and
-- idx_comment_article_created, text_ops on user_id in
-- unq_verif_user_purpose_token). PostgreSQL would reject these at
-- CREATE INDEX time — re-introspect or fix before uncommenting.
CREATE INDEX "idx_bookmark_user_created" ON "bookmark" USING btree ("user_id" timestamp_ops,"created_at" timestamp_ops);--> statement-breakpoint
CREATE INDEX "idx_da62921da76ed395" ON "bookmark" USING btree ("user_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_8c11c1ba76ed395" ON "login_attempt" USING btree ("user_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_login_attempt_created_at" ON "login_attempt" USING btree ("created_at" timestamp_ops);--> statement-breakpoint
CREATE INDEX "idx_37976e36a76ed395" ON "login_history" USING btree ("user_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_login_history_created_at" ON "login_history" USING btree ("user_id" uuid_ops,"created_at" timestamp_ops);--> statement-breakpoint
CREATE INDEX "idx_login_history_ip_address" ON "login_history" USING btree ("ip_address" inet_ops);--> statement-breakpoint
CREATE INDEX "idx_c1cc006ba76ed395" ON "verification_token" USING btree ("user_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_verif_token_created_at" ON "verification_token" USING btree ("created_at" timestamp_ops);--> statement-breakpoint
CREATE UNIQUE INDEX "unq_verif_user_purpose_token" ON "verification_token" USING btree ("user_id" text_ops,"purpose" text_ops) WHERE (token IS NOT NULL);--> statement-breakpoint
CREATE INDEX "idx_7a763a3e953c1c61" ON "followed_source" USING btree ("source_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_7a763a3eac24f853" ON "followed_source" USING btree ("follower_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_followed_source_follower_created" ON "followed_source" USING btree ("follower_id" timestamp_ops,"created_at" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_9474526c7294869c" ON "comment" USING btree ("article_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_9474526ca76ed395" ON "comment" USING btree ("user_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_comment_article_created" ON "comment" USING btree ("article_id" timestamp_ops,"created_at" uuid_ops);--> statement-breakpoint
CREATE UNIQUE INDEX "uniq_9bace7e1c74f2195" ON "refresh_tokens" USING btree ("refresh_token" text_ops);--> statement-breakpoint
CREATE INDEX "gin_article_categories" ON "article" USING gin ("categories" array_ops);--> statement-breakpoint
CREATE INDEX "gin_article_link_trgm" ON "article" USING gin ("link" gin_trgm_ops);--> statement-breakpoint
CREATE INDEX "gin_article_title_trgm" ON "article" USING gin ("title" gin_trgm_ops);--> statement-breakpoint
CREATE INDEX "gin_article_tsv" ON "article" USING gin ("tsv" tsvector_ops);--> statement-breakpoint
CREATE INDEX "idx_23a0e66953c1c61" ON "article" USING btree ("source_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_article_published_at" ON "article" USING btree ("published_at" timestamp_ops);--> statement-breakpoint
CREATE INDEX "idx_article_published_id" ON "article" USING btree ("published_at" timestamp_ops,"id" uuid_ops);--> statement-breakpoint
CREATE UNIQUE INDEX "unq_article_hash" ON "article" USING btree ("hash" text_ops);--> statement-breakpoint
CREATE UNIQUE INDEX "unq_user_email" ON "user" USING btree (lower((email)::text) text_ops);--> statement-breakpoint
CREATE UNIQUE INDEX "unq_source_name" ON "source" USING btree (lower((name)::text) text_ops);--> statement-breakpoint
CREATE UNIQUE INDEX "unq_source_url" ON "source" USING btree (lower((url)::text) text_ops);--> statement-breakpoint
CREATE INDEX "idx_6fe2655d7294869c" ON "bookmark_article" USING btree ("article_id" uuid_ops);--> statement-breakpoint
CREATE INDEX "idx_6fe2655d92741d25" ON "bookmark_article" USING btree ("bookmark_id" uuid_ops);
*/
File diff suppressed because it is too large Load Diff
+13
View File
@@ -0,0 +1,13 @@
{
"dialect": "postgresql",
"entries": [
{
"breakpoints": true,
"idx": 0,
"tag": "0000_aromatic_dorian_gray",
"version": "7",
"when": 1762691204645
}
],
"version": "7"
}
+2 -4
View File
@@ -7,16 +7,14 @@
"snakecase-keys": "^9.0.2"
},
"devDependencies": {
"@types/bun": "catalog:",
"@types/pg": "^8.15.6",
"drizzle-kit": "^0.31.6",
"typescript": "catalog:"
"drizzle-kit": "^0.31.6"
},
"exports": {
"./client": "./src/client.ts",
"./queries": "./src/queries/index.ts",
"./schema": "./src/schema.ts",
"./utils": "./src/utils/index.ts"
"./utils": "./src/utils/*"
},
"name": "@basango/db",
"private": true,
+13 -4
View File
@@ -14,11 +14,20 @@ const connectionConfig = {
};
const pool = new Pool({
connectionString: process.env.DATABASE_URL ?? process.env.DATABASE_PRIMARY_URL!,
connectionString: process.env.BASANGO_DATABASE_URL!,
...connectionConfig,
});
// Lightweight connection pool monitoring (single pool)
/**
* Retrieves runtime statistics for the database connection pool.
*
* This function reads internal pool and connection configuration values and returns
* a snapshot describing pool usage, capacity and utilization. Values that are not
* available on the underlying pool or configuration are normalized to safe defaults
* (zeros or false) so the result is stable.
*
* @returns An object describing the current connection pool statistics and a small summary.
*/
export const getConnectionPoolStats = () => {
const stats = {
active: Math.max(0, (pool.totalCount ?? 0) - (pool.idleCount ?? 0)),
@@ -34,9 +43,9 @@ export const getConnectionPoolStats = () => {
totalConnections > 0 ? Math.round((stats.active / totalConnections) * 100) : 0;
return {
instance: process.env.FLY_ALLOC_ID || "local",
instance: "local",
pools: { primary: stats },
region: process.env.FLY_REGION || "unknown",
region: "unknown",
summary: {
hasExhaustedPools: stats.active >= totalConnections || (stats.waiting ?? 0) > 0,
totalActive: stats.active,
-2
View File
@@ -1,2 +0,0 @@
export const SOURCE_IMAGE_BASE = "https://devscast.org/images/sources/";
export const PUBLICATION_GRAPH_DAYS = 180;
+20
View File
@@ -0,0 +1,20 @@
/**
 * Base URL for source images.
 * Query modules build the full path as `SOURCE_IMAGE_BASE + <source name> + ".png"`.
 */
export const SOURCE_IMAGE_BASE = "https://devscast.org/images/sources/";

/**
 * Number of days to include in the publication graph for sources.
 * This defines the time range for which publication data is aggregated and displayed.
 */
export const PUBLICATION_GRAPH_DAYS = 180;

// Default pagination settings shared by the paginated query helpers.
/** Page number used when a request does not specify one (1-based). */
export const PAGINATION_DEFAULT_PAGE = 1;
/** Number of items returned per page when no limit is supplied. */
export const PAGINATION_DEFAULT_LIMIT = 5;
/** Hard upper bound on the per-page limit a caller may request. */
export const PAGINATION_MAX_LIMIT = 100;
-547
View File
@@ -1,547 +0,0 @@
import type { AnyColumn, SQL } from "drizzle-orm";
import { and, asc, desc, eq, gt, lt, or, sql } from "drizzle-orm";
import type { Database } from "@/client";
import { articles, bookmarkArticles, bookmarks, comments, sources, users } from "@/schema";
import {
type PageRequest,
type PageState,
type PaginationMeta,
type SortDirection,
buildPaginationResult,
createPageState,
decodeCursor,
} from "@/utils/pagination";
/**
 * Caller-supplied filters for article listing queries.
 * Empty/whitespace-only values are discarded by `normalizeArticleFilters`.
 */
export interface ArticleFilters {
  search?: string | null;
  category?: string | null;
  // Publication-date bounds in unix seconds (inclusive; applied via SQL BETWEEN).
  dateRange?: { start: number; end: number } | null;
  sortDirection?: SortDirection;
}

/**
 * One row of the article overview feed.
 * Key casing is mixed (snake_case / camelCase) because these names double as
 * SQL select aliases — keep them stable; API consumers depend on them.
 */
export interface ArticleOverviewRow {
  article_id: string;
  articleTitle: string;
  articleLink: string;
  // Comma-joined flattening of the text[] column; null when no categories.
  articleCategories: string | null;
  article_excerpt: string | null;
  article_published_at: string;
  article_image: string | null;
  article_reading_time: number | null;
  sourceId: string;
  source_display_name: string | null;
  source_image: string;
  sourceUrl: string;
  source_name: string;
  source_created_at: string;
  article_is_bookmarked: boolean;
}

/** Paginated result wrapper for overview queries. */
export interface ArticleOverviewResult {
  data: ArticleOverviewRow[];
  pagination: PaginationMeta;
}

/** Full article payload returned by `getArticleDetails` (article + source). */
export interface ArticleDetailsRow {
  article_id: string;
  articleTitle: string;
  articleLink: string;
  articleCategories: string | null;
  articleBody: string;
  article_hash: string;
  article_published_at: string;
  article_crawled_at: string;
  article_updated_at: string | null;
  article_bias: string;
  article_reliability: string;
  article_transparency: string;
  article_sentiment: string;
  article_metadata: unknown;
  article_reading_time: number | null;
  sourceId: string;
  source_name: string;
  source_description: string | null;
  sourceUrl: string;
  source_updated_at: string | null;
  source_display_name: string | null;
  source_bias: string;
  source_reliability: string;
  source_transparency: string;
  source_image: string;
  article_is_bookmarked: boolean;
}

/** One comment row joined with its author, as returned by `getArticleCommentList`. */
export interface ArticleCommentRow {
  comment_id: string;
  comment_content: string;
  comment_sentiment: string;
  comment_created_at: string;
  user_id: string;
  user_name: string;
}

/**
 * Internal, cleaned-up form of `ArticleFilters`: strings are trimmed,
 * empties dropped, and `sortDirection` always resolved.
 */
interface NormalizedArticleFilters {
  search?: string;
  category?: string;
  dateRange?: { start: number; end: number } | null;
  sortDirection: SortDirection;
}

/** Flat row shape streamed by `getArticlesForExport`. */
export interface ArticleExportRow {
  articleId: string;
  articleTitle: string;
  articleLink: string;
  articleCategories: string | null;
  articleBody: string;
  articleSource: string;
  articleHash: string;
  articlePublishedAt: string;
  articleCrawledAt: string;
}

/** Optional constraints for the export stream. */
export interface ArticleExportParams {
  // Exact source name to restrict the export to.
  source?: string | null;
  // Publication-date bounds in unix seconds (inclusive).
  dateRange?: { start: number; end: number } | null;
  // Rows fetched per round-trip; non-positive values fall back to the default.
  batchSize?: number;
}
/**
 * Streams articles (joined with their source name) for export jobs.
 *
 * Rows are yielded one at a time, newest publication first, fetched from the
 * database in batches via LIMIT/OFFSET.
 *
 * @param db - Drizzle database handle.
 * @param params - Optional source filter, publication date range (unix
 *   seconds) and batch size (defaults to 1000 when unset or non-positive).
 *
 * NOTE(review): OFFSET paging over a live table can skip or repeat rows if
 * articles are inserted mid-export — confirm exports run against a quiescent
 * dataset, or switch to keyset pagination.
 */
export async function* getArticlesForExport(
  db: Database,
  params: ArticleExportParams = {},
): AsyncGenerator<ArticleExportRow> {
  const batchSize = params.batchSize && params.batchSize > 0 ? params.batchSize : 1000;
  const filters: SQL[] = [];
  if (params.source) {
    filters.push(eq(sources.name, params.source));
  }
  if (params.dateRange) {
    // Bounds are unix seconds; BETWEEN is inclusive on both ends.
    filters.push(
      sql`${articles.publishedAt} BETWEEN to_timestamp(
${params.dateRange.start}
)
AND
to_timestamp
(
${params.dateRange.end}
)`,
    );
  }
  let query = db
    .select({
      articleBody: articles.body,
      // Flatten the text[] column for flat export formats (CSV etc.).
      articleCategories: sql<string | null>`array_to_string
(${articles.categories}, ',')`,
      articleCrawledAt: articles.crawledAt,
      articleHash: articles.hash,
      articleId: articles.id,
      articleLink: articles.link,
      articlePublishedAt: articles.publishedAt,
      articleSource: sources.name,
      articleTitle: articles.title,
    })
    .from(articles)
    .innerJoin(sources, eq(articles.sourceId, sources.id));
  // where() takes a single expression; only wrap in AND when needed.
  if (filters.length === 1) {
    query = query.where(filters[0]);
  } else if (filters.length > 1) {
    query = query.where(and(...filters));
  }
  query = query.orderBy(desc(articles.publishedAt), desc(articles.id));
  let offset = 0;
  while (true) {
    const rows = await query.limit(batchSize).offset(offset);
    if (rows.length === 0) {
      break; // result set drained
    }
    for (const row of rows) {
      yield {
        ...row,
        articleCategories: row.articleCategories ?? null,
      };
    }
    offset += batchSize;
  }
}
// Base URL for source images.
// NOTE(review): duplicates the exported constant in constant.ts — prefer
// importing SOURCE_IMAGE_BASE from "@/constant" (as sources.ts already does).
const SOURCE_IMAGE_BASE = "https://devscast.org/images/sources/";
/**
 * Cleans caller-supplied article filters: trims `search` and `category`,
 * discards empties (as `undefined`), defaults `dateRange` to `null` and
 * `sortDirection` to `"desc"`.
 */
function normalizeArticleFilters(filters?: ArticleFilters): NormalizedArticleFilters {
  // Shared trim-or-drop rule for optional string filters.
  const clean = (value?: string | null): string | undefined => {
    const trimmed = value?.trim();
    return trimmed ? trimmed : undefined;
  };
  return {
    category: clean(filters?.category),
    dateRange: filters?.dateRange ?? null,
    search: clean(filters?.search),
    sortDirection: filters?.sortDirection ?? "desc",
  };
}
/**
 * Translates normalized filters into drizzle SQL conditions.
 *
 * @param filters - Output of `normalizeArticleFilters`.
 * @returns The WHERE conditions plus, when full-text search is active, the
 *   sanitized tsquery string (callers reuse it for ts_rank ordering).
 *
 * NOTE(review): the search string is only whitespace-joined with `&`;
 * to_tsquery can still raise on reserved syntax characters (e.g. `!`, `(`) —
 * confirm input is sanitized upstream or consider websearch_to_tsquery.
 */
function buildArticleFilterConditions(filters: NormalizedArticleFilters): {
  conditions: SQL[];
  searchQuery?: string;
} {
  const conditions: SQL[] = [];
  let searchQuery: string | undefined;
  if (filters.category) {
    // True when the category appears anywhere in the text[] column.
    conditions.push(sql`${filters.category} = ANY(
${articles.categories}
)`);
  }
  if (filters.search) {
    // "a b c" -> "a & b & c": every word must match.
    const sanitized = filters.search.replace(/\s+/g, " & ");
    if (sanitized.length > 0) {
      searchQuery = sanitized;
      conditions.push(
        sql`${articles.tsv} @@ to_tsquery('french',
${sanitized}
)`,
      );
    }
  }
  if (filters.dateRange) {
    // Bounds are unix seconds; BETWEEN is inclusive on both ends.
    conditions.push(
      sql`${articles.publishedAt} BETWEEN to_timestamp(
${filters.dateRange.start}
)
AND
to_timestamp
(
${filters.dateRange.end}
)`,
    );
  }
  return { conditions, searchQuery };
}
/**
 * Correlated EXISTS expression: true when `userId` has the article of the
 * outer query (`articles.id`) in any of their bookmarks.
 *
 * @param userId - Id of the viewing user (bound as a query parameter).
 */
function buildBookmarkExistsExpression(userId: string): SQL<boolean> {
  return sql`EXISTS
(SELECT 1
FROM ${bookmarkArticles} ba
INNER JOIN ${bookmarks} b ON ba.bookmark_id = b.id
WHERE ba.article_id = ${articles.id}
AND b.user_id = ${userId})`;
}
/**
 * Shared engine for the article overview feeds.
 *
 * Applies the caller's base conditions plus normalized filters, keyset
 * (cursor) pagination on (published_at, id), optional ts_rank ordering when a
 * search is active, and fetches one extra row so the pagination helper can
 * tell whether a next page exists.
 *
 * @param db - Drizzle database handle.
 * @param options - Viewer id, page state, normalized filters and optional
 *   extra WHERE conditions (e.g. restricting to one source).
 */
async function fetchArticleOverview(
  db: Database,
  options: {
    userId: string;
    page: PageState;
    filters: NormalizedArticleFilters;
    baseConditions?: SQL[];
  },
): Promise<ArticleOverviewResult> {
  const baseConditions = options.baseConditions ?? [];
  const { conditions: filterConditions, searchQuery } = buildArticleFilterConditions(
    options.filters,
  );
  const whereConditions = [...baseConditions, ...filterConditions];
  const bookmarkExpression = buildBookmarkExistsExpression(options.userId);
  const selectFields = {
    article_excerpt: articles.excerpt,
    article_id: articles.id,
    article_image: articles.image,
    article_is_bookmarked: bookmarkExpression,
    article_published_at: articles.publishedAt,
    article_reading_time: articles.readingTime,
    articleCategories: sql<string | null>`array_to_string
(${articles.categories}, ',')`,
    articleLink: articles.link,
    articleTitle: articles.title,
    source_created_at: sources.createdAt,
    source_display_name: sources.displayName,
    // NOTE(review): the quoted interpolation puts a bound-parameter
    // placeholder inside a SQL string literal ('$N'); verify the generated
    // query — binding the constant unquoted looks intended here.
    source_image: sql<string>`('${SOURCE_IMAGE_BASE}' || ${sources.name} || '.png')`,
    source_name: sources.name,
    sourceId: sources.id,
    sourceUrl: sources.url,
  } satisfies Record<string, SQL | AnyColumn>;
  let query = db
    .select(selectFields)
    .from(articles)
    .innerJoin(sources, eq(articles.sourceId, sources.id));
  // Keyset cursor: resume strictly after (published_at, id) of the last row
  // served, in the direction of the active sort.
  const cursor = decodeCursor(options.page.cursor);
  if (cursor?.date) {
    const comparison =
      options.filters.sortDirection === "asc"
        ? or(
            gt(articles.publishedAt, cursor.date),
            and(eq(articles.publishedAt, cursor.date), gt(articles.id, cursor.id)),
          )
        : or(
            lt(articles.publishedAt, cursor.date),
            and(eq(articles.publishedAt, cursor.date), lt(articles.id, cursor.id)),
          );
    whereConditions.push(comparison);
  }
  if (whereConditions.length === 1) {
    query = query.where(whereConditions[0]);
  } else if (whereConditions.length > 1) {
    query = query.where(and(...whereConditions));
  }
  const orderings: (SQL | AnyColumn)[] = [];
  if (searchQuery) {
    // Rank full-text matches first; tie-break below by date then id.
    orderings.push(
      options.filters.sortDirection === "asc"
        ? sql`ts_rank
(${articles.tsv}, to_tsquery('french', ${searchQuery}))
ASC`
        : sql`ts_rank
(${articles.tsv}, to_tsquery('french', ${searchQuery}))
DESC`,
    );
  }
  if (options.filters.sortDirection === "asc") {
    orderings.push(asc(articles.publishedAt), asc(articles.id));
  } else {
    orderings.push(desc(articles.publishedAt), desc(articles.id));
  }
  // limit + 1: the extra row signals whether a next page exists.
  const rows = await query.orderBy(...orderings).limit(options.page.limit + 1);
  return buildPaginationResult(rows, options.page, {
    date: "article_published_at",
    id: "article_id",
  });
}
/**
 * Cursor-paginated article overview feed for a user, with optional filters.
 * Thin wrapper: normalizes page/filter input, then delegates to
 * `fetchArticleOverview` with no extra base conditions.
 */
export async function getArticleOverviewList(
  db: Database,
  params: {
    userId: string;
    page?: PageRequest;
    filters?: ArticleFilters;
  },
): Promise<ArticleOverviewResult> {
  const { userId, page, filters } = params;
  return fetchArticleOverview(db, {
    filters: normalizeArticleFilters(filters),
    page: createPageState(page),
    userId,
  });
}
/**
 * Article overview feed restricted to one source, for a given viewer.
 * Delegates to `fetchArticleOverview` with an `eq(sources.id, …)` base
 * condition after normalizing page and filter input.
 */
export async function getSourceArticleOverviewList(
  db: Database,
  params: {
    sourceId: string;
    userId: string;
    page?: PageRequest;
    filters?: ArticleFilters;
  },
): Promise<ArticleOverviewResult> {
  const { sourceId, userId, page, filters } = params;
  return fetchArticleOverview(db, {
    baseConditions: [eq(sources.id, sourceId)],
    filters: normalizeArticleFilters(filters),
    page: createPageState(page),
    userId,
  });
}
/**
 * Lists the articles inside one of the user's bookmarks, with the same
 * filtering, search ranking and keyset pagination as the overview feed.
 *
 * The bookmark must belong to `userId` (enforced in the WHERE clause), so
 * `article_is_bookmarked` is hard-coded to true.
 *
 * @param db - Drizzle database handle.
 * @param params - Viewer id, bookmark id, optional page request and filters.
 */
export async function getBookmarkedArticleList(
  db: Database,
  params: {
    userId: string;
    bookmarkId: string;
    page?: PageRequest;
    filters?: ArticleFilters;
  },
): Promise<ArticleOverviewResult> {
  const page = createPageState(params.page);
  const filters = normalizeArticleFilters(params.filters);
  const { conditions: filterConditions, searchQuery } = buildArticleFilterConditions(filters);
  // Ownership check: both the bookmark id and its owner must match.
  const whereConditions: SQL[] = [
    eq(bookmarks.id, params.bookmarkId),
    eq(bookmarks.userId, params.userId),
    ...filterConditions,
  ];
  const selectFields = {
    article_excerpt: articles.excerpt,
    article_id: articles.id,
    article_image: articles.image,
    // Rows come from the user's own bookmark, so this is always true.
    article_is_bookmarked: sql<boolean>`true`,
    article_published_at: articles.publishedAt,
    article_reading_time: articles.readingTime,
    articleCategories: sql<string | null>`array_to_string
(${articles.categories}, ',')`,
    articleLink: articles.link,
    articleTitle: articles.title,
    source_created_at: sources.createdAt,
    source_display_name: sources.displayName,
    // NOTE(review): quoted interpolation leaves a bound-parameter placeholder
    // inside a SQL string literal — verify the generated query (same pattern
    // as fetchArticleOverview).
    source_image: sql<string>`('${SOURCE_IMAGE_BASE}' || ${sources.name} || '.png')`,
    source_name: sources.name,
    sourceId: sources.id,
    sourceUrl: sources.url,
  } satisfies Record<string, SQL | AnyColumn>;
  let query = db
    .select(selectFields)
    .from(bookmarkArticles)
    .innerJoin(articles, eq(bookmarkArticles.articleId, articles.id))
    .innerJoin(bookmarks, eq(bookmarkArticles.bookmarkId, bookmarks.id))
    .innerJoin(sources, eq(articles.sourceId, sources.id));
  // Keyset cursor on (published_at, id), matching the active sort direction.
  const cursor = decodeCursor(page.cursor);
  if (cursor?.date) {
    const comparison =
      filters.sortDirection === "asc"
        ? or(
            gt(articles.publishedAt, cursor.date),
            and(eq(articles.publishedAt, cursor.date), gt(articles.id, cursor.id)),
          )
        : or(
            lt(articles.publishedAt, cursor.date),
            and(eq(articles.publishedAt, cursor.date), lt(articles.id, cursor.id)),
          );
    whereConditions.push(comparison);
  }
  if (whereConditions.length === 1) {
    query = query.where(whereConditions[0]);
  } else if (whereConditions.length > 1) {
    query = query.where(and(...whereConditions));
  }
  const orderings: (SQL | AnyColumn)[] = [];
  if (searchQuery) {
    // Rank full-text matches first; date/id tie-break added below.
    orderings.push(
      filters.sortDirection === "asc"
        ? sql`ts_rank
(${articles.tsv}, to_tsquery('french', ${searchQuery}))
ASC`
        : sql`ts_rank
(${articles.tsv}, to_tsquery('french', ${searchQuery}))
DESC`,
    );
  }
  if (filters.sortDirection === "asc") {
    orderings.push(asc(articles.publishedAt), asc(articles.id));
  } else {
    orderings.push(desc(articles.publishedAt), desc(articles.id));
  }
  // limit + 1: the extra row tells the pagination helper a next page exists.
  const rows = await query.orderBy(...orderings).limit(page.limit + 1);
  return buildPaginationResult(rows, page, {
    date: "article_published_at",
    id: "article_id",
  });
}
/**
 * Fetches the full detail payload for one article, joined with its source and
 * a flag telling whether the viewing user has bookmarked it.
 *
 * @param db - Drizzle database handle.
 * @param params - `id` of the article and `userId` of the viewer.
 * @returns The detail row, or `null` when the article does not exist.
 */
export async function getArticleDetails(
  db: Database,
  params: { id: string; userId: string },
): Promise<ArticleDetailsRow | null> {
  const bookmarkExpression = buildBookmarkExistsExpression(params.userId);
  const [row] = await db
    .select({
      article_bias: articles.bias,
      article_crawled_at: articles.crawledAt,
      article_hash: articles.hash,
      article_id: articles.id,
      article_is_bookmarked: bookmarkExpression,
      article_metadata: articles.metadata,
      article_published_at: articles.publishedAt,
      article_reading_time: articles.readingTime,
      article_reliability: articles.reliability,
      article_sentiment: articles.sentiment,
      article_transparency: articles.transparency,
      article_updated_at: articles.updatedAt,
      articleBody: articles.body,
      // Flatten the text[] column into a comma-joined string.
      articleCategories: sql<string | null>`array_to_string(${articles.categories}, ',')`,
      articleLink: articles.link,
      articleTitle: articles.title,
      source_bias: sources.bias,
      source_description: sources.description,
      source_display_name: sources.displayName,
      // Bind the base URL as a query parameter. The previous quoted form
      // ('${SOURCE_IMAGE_BASE}') placed the bound-parameter placeholder inside
      // a SQL string literal, breaking the generated query.
      source_image: sql<string>`(${SOURCE_IMAGE_BASE} || ${sources.name} || '.png')`,
      source_name: sources.name,
      source_reliability: sources.reliability,
      source_transparency: sources.transparency,
      source_updated_at: sources.updatedAt,
      sourceId: sources.id,
      sourceUrl: sources.url,
    })
    .from(articles)
    .innerJoin(sources, eq(articles.sourceId, sources.id))
    .where(eq(articles.id, params.id))
    .limit(1);
  return row ?? null;
}
/**
 * Lists comments on an article (joined with their author), newest first,
 * using keyset pagination on (created_at, id).
 *
 * @param db - Drizzle database handle.
 * @param params - Article id and optional page request.
 */
export async function getArticleCommentList(
  db: Database,
  params: { articleId: string; page?: PageRequest },
): Promise<{ data: ArticleCommentRow[]; pagination: PaginationMeta }> {
  const page = createPageState(params.page);
  // Always at least one condition, so the length-0 branch below never fires.
  const whereConditions: SQL[] = [eq(comments.articleId, params.articleId)];
  const cursor = decodeCursor(page.cursor);
  if (cursor?.date) {
    // Resume strictly after the last served (created_at, id), descending.
    whereConditions.push(
      or(
        lt(comments.createdAt, cursor.date),
        and(eq(comments.createdAt, cursor.date), lt(comments.id, cursor.id)),
      ),
    );
  }
  let query = db
    .select({
      comment_content: comments.content,
      comment_created_at: comments.createdAt,
      comment_id: comments.id,
      comment_sentiment: comments.sentiment,
      user_id: users.id,
      user_name: users.name,
    })
    .from(comments)
    .innerJoin(users, eq(comments.userId, users.id));
  if (whereConditions.length === 1) {
    query = query.where(whereConditions[0]);
  } else if (whereConditions.length > 1) {
    query = query.where(and(...whereConditions));
  }
  // limit + 1: the extra row signals whether a next page exists.
  const rows = await query
    .orderBy(desc(comments.createdAt), desc(comments.id))
    .limit(page.limit + 1);
  return buildPaginationResult(rows, page, {
    date: "comment_created_at",
    id: "comment_id",
  });
}
-66
View File
@@ -1,66 +0,0 @@
import type { SQL } from "drizzle-orm";
import { and, desc, eq, lt, sql } from "drizzle-orm";
import type { Database } from "@/client";
import { bookmarkArticles, bookmarks } from "@/schema";
import {
type PageRequest,
type PaginationMeta,
buildPaginationResult,
createPageState,
decodeCursor,
} from "@/utils/pagination";
/**
 * One bookmark with its aggregated article count.
 * Snake_case keys double as SQL select aliases — keep them stable.
 */
export interface BookmarkRow {
  bookmark_id: string;
  bookmark_name: string;
  bookmark_description: string | null;
  bookmark_created_at: string;
  bookmark_updated_at: string | null;
  // count() over the bookmark_article join; 0 for empty bookmarks.
  bookmark_articles_count: number;
  bookmark_is_public: boolean;
}

/** Paginated result wrapper for `getBookmarkList`. */
export interface BookmarkListResult {
  data: BookmarkRow[];
  pagination: PaginationMeta;
}
/**
 * Lists a user's bookmarks with their article counts, newest first.
 *
 * @param db - Drizzle database handle.
 * @param params - Owner id and optional page request.
 *
 * NOTE(review): the cursor resumes on `bookmarks.id` alone while ordering is
 * (created_at DESC, id DESC) — uuid order does not follow creation order, so
 * pages can skip/repeat rows; consider a (created_at, id) cursor as in the
 * article queries. Also, count() typically arrives as a string from the pg
 * driver despite the sql<number> annotation — confirm consumers coerce it.
 */
export async function getBookmarkList(
  db: Database,
  params: { userId: string; page?: PageRequest },
): Promise<BookmarkListResult> {
  const page = createPageState(params.page);
  const whereConditions: SQL[] = [eq(bookmarks.userId, params.userId)];
  const cursor = decodeCursor(page.cursor);
  if (cursor?.id) {
    whereConditions.push(lt(bookmarks.id, cursor.id));
  }
  let query = db
    .select({
      // LEFT JOIN + count(article_id): empty bookmarks yield 0, not a dropped row.
      bookmark_articles_count: sql<number>`count(${bookmarkArticles.articleId})`,
      bookmark_created_at: bookmarks.createdAt,
      bookmark_description: bookmarks.description,
      bookmark_id: bookmarks.id,
      bookmark_is_public: bookmarks.isPublic,
      bookmark_name: bookmarks.name,
      bookmark_updated_at: bookmarks.updatedAt,
    })
    .from(bookmarks)
    .leftJoin(bookmarkArticles, eq(bookmarkArticles.bookmarkId, bookmarks.id))
    .groupBy(bookmarks.id);
  if (whereConditions.length === 1) {
    query = query.where(whereConditions[0]);
  } else if (whereConditions.length > 1) {
    query = query.where(and(...whereConditions));
  }
  // limit + 1: the extra row signals whether a next page exists.
  const rows = await query
    .orderBy(desc(bookmarks.createdAt), desc(bookmarks.id))
    .limit(page.limit + 1);
  return buildPaginationResult(rows, page, { id: "bookmark_id" });
}
-4
View File
@@ -1,4 +0,0 @@
/**
 * Barrel file re-exporting every query module, so consumers can import from
 * a single entry point (exposed as "./queries" in the package exports).
 */
export * from "./articles";
export * from "./bookmarks";
export * from "./sources";
export * from "./users";
-339
View File
@@ -1,339 +0,0 @@
import type { SQL } from "drizzle-orm";
import { and, desc, eq, lt, or, sql } from "drizzle-orm";
import type { Database } from "@/client";
import { PUBLICATION_GRAPH_DAYS, SOURCE_IMAGE_BASE } from "@/constant";
import { articles, followedSources, sources } from "@/schema";
import {
type PageRequest,
type PaginationMeta,
buildPaginationResult,
createPageState,
decodeCursor,
} from "@/utils/pagination";
/**
 * One row of the source overview listing.
 * Key casing is mixed (snake_case / camelCase) because these names double as
 * SQL select aliases — keep them stable; API consumers depend on them.
 */
export interface SourceOverviewRow {
  sourceId: string;
  source_display_name: string | null;
  source_image: string;
  sourceUrl: string;
  source_name: string;
  source_created_at: string;
  source_is_followed: boolean;
}

/** Paginated result wrapper for the source overview listing. */
export interface SourceOverviewResult {
  data: SourceOverviewRow[];
  pagination: PaginationMeta;
}

/** One day of publication activity in the publication graph. */
export interface PublicationEntry {
  day: string;
  count: number;
}

/** A category's share of a source's articles. */
export interface CategoryShare {
  category: string;
  count: number;
  percentage: number;
}

/** Full detail payload for a single source. */
export interface SourceDetailsResult {
  source: {
    sourceId: string;
    source_name: string;
    source_description: string | null;
    sourceUrl: string;
    source_updated_at: string | null;
    source_display_name: string | null;
    source_bias: string;
    source_reliability: string;
    source_transparency: string;
    source_image: string;
    articles_count: number;
    // Timestamp of the most recent crawl; null when no articles exist yet.
    source_crawled_at: string | null;
    articles_metadata_available: number;
    source_is_followed: boolean;
  };
  // Daily publication counts (see PUBLICATION_GRAPH_DAYS for the window).
  publicationGraph: PublicationEntry[];
  categoryShares: CategoryShare[];
}

/** Per-source aggregate statistics returned by `getSourceStatisticsList`. */
export interface SourceStatisticsRow {
  sourceId: string;
  sourceName: string;
  // max(crawled_at) over the source's articles; null when it has none.
  sourceCrawledAt: string | null;
  articlesCount: number;
  articleMetadataAvailable: number;
}
/**
 * Aggregates per-source article statistics: total article count, how many
 * articles carry metadata, and the most recent crawl timestamp.
 *
 * @param db - Drizzle database handle.
 * @returns One row per source, ordered by source name ascending; counts are
 *   coerced to numbers (the pg driver returns aggregates as strings).
 */
export async function getSourceStatisticsList(db: Database): Promise<SourceStatisticsRow[]> {
  const rows = await db
    .select({
      // Articles that have a non-null metadata payload.
      articleMetadataAvailable: sql<number>`sum
(CASE WHEN ${articles.metadata} IS NOT NULL THEN 1 ELSE 0 END)`,
      articlesCount: sql<number>`count
(${articles.id})`,
      sourceCrawledAt: sql<string | null>`max
(${articles.crawledAt})`,
      sourceId: sources.id,
      sourceName: sources.name,
    })
    .from(sources)
    // LEFT JOIN keeps sources with zero articles in the result.
    .leftJoin(articles, eq(articles.sourceId, sources.id))
    .groupBy(sources.id, sources.name)
    // Drizzle columns have no `.asc()` method — the original
    // `sources.name.asc()` throws at runtime. Passing the column orders
    // ascending by default (the `asc()` helper is not imported here).
    .orderBy(sources.name);
  return rows.map((row) => ({
    articleMetadataAvailable: Number(row.articleMetadataAvailable ?? 0),
    articlesCount: Number(row.articlesCount ?? 0),
    sourceCrawledAt: row.sourceCrawledAt,
    sourceId: row.sourceId,
    sourceName: row.sourceName,
  }));
}
// Filter for publication-date boundary lookups: a source name and an
// optional category constraint (null/undefined means "all categories").
export interface PublicationDateParams {
  source: string;
  category?: string | null;
}
/**
 * Shared implementation behind the earliest/latest publication-date lookups.
 * Falls back to the current time (ISO string) when no article matches.
 */
async function selectPublicationBoundary(
  db: Database,
  fn: "min" | "max",
  params: PublicationDateParams,
): Promise<string> {
  const filters: SQL[] = [eq(sources.name, params.source)];
  if (params.category) {
    filters.push(sql`${params.category} = ANY(${articles.categories})`);
  }
  const whereClause = filters.length === 1 ? filters[0] : and(...filters);
  // Pick the aggregate expression up front so the query shape stays flat.
  const boundaryExpression =
    fn === "min"
      ? sql<string | null>`min
      (${articles.publishedAt})`
      : sql<string | null>`max
      (${articles.publishedAt})`;
  const [found] = await db
    .select({ boundary: boundaryExpression })
    .from(articles)
    .innerJoin(sources, eq(articles.sourceId, sources.id))
    .where(whereClause);
  return found?.boundary ?? new Date().toISOString();
}
/** Oldest `published_at` for a source, optionally scoped to a category. */
export async function getEarliestPublicationDate(
  db: Database,
  params: PublicationDateParams,
): Promise<string> {
  const boundary = await selectPublicationBoundary(db, "min", params);
  return boundary;
}
/** Newest `published_at` for a source, optionally scoped to a category. */
export async function getLatestPublicationDate(
  db: Database,
  params: PublicationDateParams,
): Promise<string> {
  const boundary = await selectPublicationBoundary(db, "max", params);
  return boundary;
}
/**
 * Correlated EXISTS expression: true when `userId` follows the outer
 * `source` row. Usable directly inside a select projection.
 */
function buildFollowExistsExpression(userId: string): SQL<boolean> {
  // Typed template (`sql<boolean>`) so the result satisfies the declared
  // SQL<boolean> return type instead of SQL<unknown>.
  // The "sourceId" column is camelCase in the database (drizzle quotes it on
  // creation), so the raw reference must be quoted too — unquoted
  // `f.sourceId` would be case-folded to `sourceid` by Postgres and fail.
  return sql<boolean>`EXISTS
  (SELECT 1
   FROM ${followedSources} f
   WHERE f."sourceId" = ${sources.id}
     AND f.follower_id = ${userId})`;
}
/**
 * Keyset-paginated source overview for a user, newest sources first, with a
 * per-row "is followed" flag. Fetches limit+1 rows so the pagination helper
 * can detect whether a next page exists.
 */
export async function getSourceOverviewList(
  db: Database,
  params: { userId: string; page?: PageRequest },
): Promise<SourceOverviewResult> {
  const page = createPageState(params.page);
  const followExpression = buildFollowExistsExpression(params.userId);
  // `.$dynamic()` keeps the builder open so `.where()` can be appended
  // conditionally below (drizzle otherwise "locks" the type after chaining).
  let query = db
    .select({
      source_created_at: sources.createdAt,
      source_display_name: sources.displayName,
      // Concatenate the base URL as a bound parameter. The previous template
      // wrapped the interpolation in quotes ('${...}'), which turned the
      // placeholder itself into the literal string '$1' instead of binding.
      source_image: sql<string>`(${SOURCE_IMAGE_BASE} || ${sources.name} || '.png')`,
      source_is_followed: followExpression,
      source_name: sources.name,
      sourceId: sources.id,
      sourceUrl: sources.url,
    })
    .from(sources)
    .$dynamic();
  const cursor = decodeCursor(page.cursor);
  if (cursor?.date) {
    // Keyset predicate: strictly older rows, ties broken by descending id.
    query = query.where(
      or(
        lt(sources.createdAt, cursor.date),
        and(eq(sources.createdAt, cursor.date), lt(sources.id, cursor.id)),
      ),
    );
  }
  const rows = await query.orderBy(desc(sources.createdAt), desc(sources.id)).limit(page.limit + 1);
  return buildPaginationResult(rows, page, {
    date: "source_created_at",
    id: "sourceId",
  });
}
/**
 * Epoch-second window covering the last `days` days, extended one day into
 * the future so the current (partial) day is always fully inside the range.
 */
function createBackwardDateRange(days: number): { start: number; end: number } {
  const DAY_MS = 86_400_000;
  const nowMs = Date.now();
  const end = Math.floor((nowMs + DAY_MS) / 1000);
  const start = Math.floor((nowMs - days * DAY_MS) / 1000);
  return { end, start };
}
async function fetchPublicationGraph(db: Database, sourceId: string): Promise<PublicationEntry[]> {
const range = createBackwardDateRange(PUBLICATION_GRAPH_DAYS);
const rows = await db
.select({
count: sql<number>`count
(${articles.id})`,
day: sql<string>`date
(${articles.publishedAt})`,
})
.from(articles)
.where(eq(articles.sourceId, sourceId))
.where(
sql`${articles.publishedAt} BETWEEN to_timestamp(
${range.start}
)
AND
to_timestamp
(
${range.end}
)`,
)
.groupBy(sql`date
(${articles.publishedAt})`)
.orderBy(sql`date
(${articles.publishedAt})`);
const counts = new Map<string, number>();
for (const row of rows) {
counts.set(row.day, Number(row.count ?? 0));
}
const entries: PublicationEntry[] = [];
const start = new Date(range.start * 1000);
const end = new Date(range.end * 1000);
for (let date = new Date(start.getTime()); date < end; date.setUTCDate(date.getUTCDate() + 1)) {
const day = date.toISOString().slice(0, 10);
entries.push({ count: counts.get(day) ?? 0, day });
}
return entries;
}
/**
 * Per-category article counts and percentage shares for one source,
 * sorted by descending count.
 */
async function fetchCategoryShares(db: Database, sourceId: string): Promise<CategoryShare[]> {
  const rows = await db
    .select({
      categories: sql<string | null>`array_to_string
      (${articles.categories}, ',')`,
    })
    .from(articles)
    .where(eq(articles.sourceId, sourceId));
  // Tally every non-empty, trimmed category label across all articles.
  const tally = new Map<string, number>();
  for (const { categories } of rows) {
    if (!categories) continue;
    for (const label of categories.split(",")) {
      const trimmed = label.trim();
      if (!trimmed) continue;
      tally.set(trimmed, (tally.get(trimmed) ?? 0) + 1);
    }
  }
  let total = 0;
  for (const value of tally.values()) {
    total += value;
  }
  // Percentage rounded to two decimals; 0 when there are no articles at all.
  const shares: CategoryShare[] = [];
  for (const [category, count] of tally.entries()) {
    shares.push({
      category,
      count,
      percentage: total > 0 ? Math.round((count / total) * 10000) / 100 : 0,
    });
  }
  return shares.sort((a, b) => b.count - a.count);
}
/**
 * Full detail payload for a single source: aggregate article statistics,
 * the daily publication graph and category shares.
 * Returns null when the source id is unknown.
 */
export async function getSourceDetails(
  db: Database,
  params: { sourceId: string; userId: string },
): Promise<SourceDetailsResult | null> {
  const followExpression = buildFollowExistsExpression(params.userId);
  const [row] = await db
    .select({
      articles_count: sql<number>`count
      (${articles.id})`,
      articles_metadata_available: sql<number>`count
      (*)
      FILTER (WHERE
      ${articles.metadata}
      IS
      NOT
      NULL
      )`,
      source_bias: sources.bias,
      source_crawled_at: sql<string | null>`max
      (${articles.crawledAt})`,
      source_description: sources.description,
      source_display_name: sources.displayName,
      // Bind the base URL as a parameter. Quoting the interpolation
      // ('${...}') emitted the literal placeholder text ('$1') instead.
      source_image: sql<string>`(${SOURCE_IMAGE_BASE} || ${sources.name} || '.png')`,
      source_is_followed: followExpression,
      source_name: sources.name,
      source_reliability: sources.reliability,
      source_transparency: sources.transparency,
      source_updated_at: sources.updatedAt,
      sourceId: sources.id,
      sourceUrl: sources.url,
    })
    .from(sources)
    .leftJoin(articles, eq(articles.sourceId, sources.id))
    .where(eq(sources.id, params.sourceId))
    .groupBy(
      sources.id,
      sources.name,
      sources.description,
      sources.url,
      sources.updatedAt,
      sources.displayName,
      sources.bias,
      sources.reliability,
      sources.transparency,
    )
    .limit(1);
  if (!row) {
    return null;
  }
  // Graph and shares are independent queries; run them concurrently.
  const [publicationGraph, categoryShares] = await Promise.all([
    fetchPublicationGraph(db, params.sourceId),
    fetchCategoryShares(db, params.sourceId),
  ]);
  return {
    categoryShares,
    publicationGraph,
    source: {
      ...row,
      // The pg driver returns bigint aggregates as strings; coerce them.
      articles_count: Number(row.articles_count ?? 0),
      articles_metadata_available: Number(row.articles_metadata_available ?? 0),
    },
  };
}
-31
View File
@@ -1,31 +0,0 @@
import { eq } from "drizzle-orm";
import type { Database } from "@/client";
import { users } from "@/schema";
// Flat profile row returned by getUserProfile; the snake_case keys match
// the column aliases used in its select projection.
export interface UserProfileRow {
  user_id: string;
  user_name: string;
  user_email: string;
  user_created_at: string;
  user_updated_at: string | null;
}
/** Fetch a single user's profile row, or null when the id is unknown. */
export async function getUserProfile(
  db: Database,
  params: { userId: string },
): Promise<UserProfileRow | null> {
  const matches = await db
    .select({
      user_created_at: users.createdAt,
      user_email: users.email,
      user_id: users.id,
      user_name: users.name,
      user_updated_at: users.updatedAt,
    })
    .from(users)
    .where(eq(users.id, params.userId))
    .limit(1);
  const first = matches.at(0);
  return first ?? null;
}
+334 -349
View File
@@ -1,6 +1,7 @@
import { relations, sql } from "drizzle-orm";
import {
boolean,
check,
customType,
doublePrecision,
foreignKey,
@@ -8,326 +9,207 @@ import {
inet,
integer,
jsonb,
pgEnum,
pgSequence,
pgTable,
primaryKey,
text,
timestamp,
unique,
uniqueIndex,
uuid,
varchar,
} from "drizzle-orm/pg-core";
export const tsvector = customType<{
data: string;
}>({
const tsvector = customType<{ data: string; driverData: string }>({
dataType() {
return "tsvector";
},
});
type NumericConfig = {
precision?: number;
scale?: number;
};
export const numericCasted = customType<{
data: number;
driverData: string;
config: NumericConfig;
}>({
dataType: (config) => {
if (config?.precision && config?.scale) {
return `numeric(${config.precision}, ${config.scale})`;
}
return "numeric";
},
fromDriver: (value: string) => Number.parseFloat(value),
toDriver: (value: number) => value.toString(),
export const refreshTokensIdSeq = pgSequence("refresh_tokens_id_seq", {
cache: "1",
cycle: false,
increment: "1",
maxValue: "9223372036854775807",
minValue: "1",
startWith: "1",
});
export const articleSentimentEnum = pgEnum("article_sentiment", [
"positive",
"neutral",
"negative",
]);
// legacy table for doctrine migrations
export const doctrineMigrationVersions = pgTable("doctrine_migration_versions", {
executedAt: timestamp("executed_at", { mode: "string" }).default(sql`NULL`),
executionTime: integer("execution_time"),
version: varchar({ length: 191 }).primaryKey().notNull(),
});
export const biasEnum = pgEnum("bias", ["neutral", "slightly", "partisan", "extreme"]);
export const reliabilityEnum = pgEnum("reliability", [
"trusted",
"reliable",
"average",
"low_trust",
"unreliable",
]);
export const transparencyEnum = pgEnum("transparency", ["high", "medium", "low"]);
export const verificationTokenPurposeEnum = pgEnum("verification_token_purpose", [
"confirm_account",
"password_reset",
"unlock_account",
"delete_account",
]);
export const sources = pgTable(
"source",
{
bias: biasEnum("bias").notNull().default("neutral"),
createdAt: timestamp("created_at", { mode: "string" }).defaultNow().notNull(),
description: varchar("description", { length: 1024 }),
displayName: varchar("display_name", { length: 255 }),
id: uuid("id").notNull().defaultRandom().primaryKey(),
name: varchar("name", { length: 255 }).notNull(),
reliability: reliabilityEnum("reliability").notNull().default("reliable"),
transparency: transparencyEnum("transparency").notNull().default("medium"),
updatedAt: timestamp("updated_at", { mode: "string" }),
url: varchar("url", { length: 255 }).notNull(),
},
(table) => [
uniqueIndex("unq_source_name").using(
"btree",
sql`lower
(${table.name})`,
),
uniqueIndex("unq_sourceUrl").using(
"btree",
sql`lower
(${table.url})`,
),
],
);
export const articles = pgTable(
"article",
{
bias: biasEnum("bias").notNull().default("neutral"),
body: text("body").notNull(),
categories: text("categories").array(),
crawledAt: timestamp("crawled_at", { mode: "string" }).notNull(),
excerpt: varchar("excerpt", { length: 255 }).generatedAlwaysAs(
() => sql`((left(body, 200) || '...'))`,
),
hash: varchar("hash", { length: 32 }).notNull(),
id: uuid("id").notNull().defaultRandom().primaryKey(),
image: varchar("image", { length: 1024 }).generatedAlwaysAs(() => sql`(metadata->>'image')`),
link: varchar("link", { length: 1024 }).notNull(),
metadata: jsonb("metadata"),
publishedAt: timestamp("published_at", { mode: "string" }).notNull(),
readingTime: integer("reading_time").default(1),
reliability: reliabilityEnum("reliability").notNull().default("reliable"),
sentiment: articleSentimentEnum("sentiment").notNull().default("neutral"),
sourceId: uuid("sourceId").notNull(),
title: varchar("title", { length: 1024 }).notNull(),
tokenStatistics: jsonb("token_statistics"),
transparency: transparencyEnum("transparency").notNull().default("medium"),
tsv: tsvector("tsv").generatedAlwaysAs(
() => sql`(
setweight(to_tsvector('french', coalesce(title, '')), 'A')
|| setweight(to_tsvector('french', coalesce(body, '')), 'B')
)`,
),
updatedAt: timestamp("updated_at", { mode: "string" }),
},
(table) => [
index("article_sourceId_idx").on(table.sourceId),
index("idx_article_published_at").using("btree", table.publishedAt.desc()),
index("idx_article_published_id").using("btree", table.publishedAt.desc(), table.id.desc()),
unique("unq_article_hash").on(table.hash),
index("gin_article_tsv").using("gin", table.tsv),
index("gin_articleLink_trgm").using("gin", table.link.op("gin_trgm_ops")),
index("gin_articleTitle_trgm").using("gin", table.title.op("gin_trgm_ops")),
index("gin_articleCategories").using("gin", table.categories),
foreignKey({
columns: [table.sourceId],
foreignColumns: [sources.id],
name: "article_sourceId_fkey",
}).onDelete("cascade"),
{
expression: sql`reading_time >= 0`,
kind: "check",
name: "chk_article_reading_time",
},
{
expression: sql`(metadata IS NULL OR jsonb_typeof(metadata) IN ('object','array'))`,
kind: "check",
name: "chk_article_metadata_json",
},
],
);
export const users = pgTable(
"user",
{
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
email: varchar("email", { length: 255 }).notNull(),
id: uuid("id").notNull().defaultRandom().primaryKey(),
isConfirmed: boolean("is_confirmed").notNull().default(false),
isLocked: boolean("is_locked").notNull().default(false),
name: varchar("name", { length: 255 }).notNull(),
password: varchar("password", { length: 512 }).notNull(),
roles: jsonb("roles").notNull(),
updatedAt: timestamp("updated_at", { mode: "string" }),
},
(table) => [
uniqueIndex("unq_user_email").using("btree", sql`lower (${table.email})`),
{
expression: sql`jsonb_typeof(roles) = 'array'`,
kind: "check",
name: "chk_user_roles_array",
},
],
);
export const bookmarks = pgTable(
export const bookmark = pgTable(
"bookmark",
{
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
description: varchar("description", { length: 512 }),
id: uuid("id").notNull().defaultRandom().primaryKey(),
isPublic: boolean("is_public").notNull().default(false),
name: varchar("name", { length: 255 }).notNull(),
updatedAt: timestamp("updated_at", { mode: "string" }),
description: varchar({ length: 512 }).default(sql`NULL`),
id: uuid().primaryKey().notNull(),
isPublic: boolean("is_public").default(false).notNull(),
name: varchar({ length: 255 }).notNull(),
updatedAt: timestamp("updated_at", { mode: "string" }).default(sql`NULL`),
userId: uuid("user_id").notNull(),
},
(table) => [
index("bookmark_user_id_idx").on(table.userId),
index("idx_bookmark_user_created").using("btree", table.userId, table.createdAt.desc()),
foreignKey({
columns: [table.userId],
foreignColumns: [users.id],
name: "bookmark_user_id_fkey",
}).onDelete("cascade"),
],
);
export const bookmarkArticles = pgTable(
"bookmark_article",
{
articleId: uuid("article_id").notNull(),
bookmarkId: uuid("bookmark_id").notNull(),
},
(table) => [
primaryKey({
columns: [table.bookmarkId, table.articleId],
name: "bookmark_article_pkey",
}),
index("bookmark_article_bookmark_idx").on(table.bookmarkId),
index("bookmark_article_article_idx").on(table.articleId),
foreignKey({
columns: [table.bookmarkId],
foreignColumns: [bookmarks.id],
name: "bookmark_article_bookmark_id_fkey",
}).onDelete("cascade"),
foreignKey({
columns: [table.articleId],
foreignColumns: [articles.id],
name: "bookmark_article_article_id_fkey",
}).onDelete("cascade"),
],
);
export const comments = pgTable(
"comment",
{
articleId: uuid("article_id").notNull(),
content: varchar("content", { length: 512 }).notNull(),
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
id: uuid("id").notNull().defaultRandom().primaryKey(),
isSpam: boolean("is_spam").notNull().default(false),
sentiment: articleSentimentEnum("sentiment").notNull().default("neutral"),
userId: uuid("user_id").notNull(),
},
(table) => [
index("comment_user_id_idx").on(table.userId),
index("comment_article_id_idx").on(table.articleId),
index("idx_comment_article_created").using("btree", table.articleId, table.createdAt.desc()),
foreignKey({
columns: [table.userId],
foreignColumns: [users.id],
name: "comment_user_id_fkey",
}).onDelete("cascade"),
foreignKey({
columns: [table.articleId],
foreignColumns: [articles.id],
name: "comment_article_id_fkey",
}).onDelete("cascade"),
],
);
export const followedSources = pgTable(
"followed_source",
{
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
followerId: uuid("follower_id").notNull(),
id: uuid("id").notNull().defaultRandom().primaryKey(),
sourceId: uuid("sourceId").notNull(),
},
(table) => [
index("followed_source_follower_idx").on(table.followerId),
index("followed_source_sourceIdx").on(table.sourceId),
index("idx_followed_source_follower_created").using(
index("idx_bookmark_user_created").using(
"btree",
table.followerId,
table.createdAt.desc(),
table.userId.asc().nullsLast().op("timestamp_ops"),
table.createdAt.desc().nullsFirst().op("timestamp_ops"),
),
index("idx_da62921da76ed395").using("btree", table.userId.asc().nullsLast().op("uuid_ops")),
foreignKey({
columns: [table.followerId],
foreignColumns: [users.id],
name: "followed_source_follower_id_fkey",
}).onDelete("cascade"),
foreignKey({
columns: [table.sourceId],
foreignColumns: [sources.id],
name: "followed_source_sourceId_fkey",
columns: [table.userId],
foreignColumns: [user.id],
name: "fk_da62921da76ed395",
}).onDelete("cascade"),
],
);
export const loginAttempts = pgTable(
export const loginAttempt = pgTable(
"login_attempt",
{
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
id: uuid("id").notNull().defaultRandom().primaryKey(),
id: uuid().primaryKey().notNull(),
userId: uuid("user_id").notNull(),
},
(table) => [
index("login_attempt_user_id_idx").on(table.userId),
index("idx_login_attempt_created_at").using("btree", table.createdAt.desc()),
index("idx_8c11c1ba76ed395").using("btree", table.userId.asc().nullsLast().op("uuid_ops")),
index("idx_login_attempt_created_at").using(
"btree",
table.createdAt.desc().nullsFirst().op("timestamp_ops"),
),
foreignKey({
columns: [table.userId],
foreignColumns: [users.id],
name: "login_attempt_user_id_fkey",
foreignColumns: [user.id],
name: "fk_8c11c1ba76ed395",
}).onDelete("cascade"),
],
);
export const loginHistories = pgTable(
export const loginHistory = pgTable(
"login_history",
{
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
deviceClient: varchar("device_client", { length: 255 }),
deviceDevice: varchar("device_device", { length: 255 }),
deviceIsBot: boolean("device_is_bot").notNull().default(false),
deviceOperatingSystem: varchar("device_operating_system", { length: 255 }),
id: uuid("id").notNull().defaultRandom().primaryKey(),
deviceClient: varchar("device_client", { length: 255 }).default(sql`NULL`),
deviceDevice: varchar("device_device", { length: 255 }).default(sql`NULL`),
deviceIsBot: boolean("device_is_bot").default(false).notNull(),
deviceOperatingSystem: varchar("device_operating_system", { length: 255 }).default(sql`NULL`),
id: uuid().primaryKey().notNull(),
ipAddress: inet("ip_address"),
locationAccuracyRadius: integer("location_accuracy_radius"),
locationLatitude: doublePrecision("location_latitude"),
locationLongitude: doublePrecision("location_longitude"),
locationTimeZone: varchar("location_time_zone", { length: 255 }),
locationTimeZone: varchar("location_time_zone", { length: 255 }).default(sql`NULL`),
userId: uuid("user_id").notNull(),
},
(table) => [
index("login_history_user_id_idx").on(table.userId),
index("idx_login_history_created_at").using("btree", table.userId, table.createdAt.desc()),
index("login_history_ip_address_idx").on(table.ipAddress),
index("idx_37976e36a76ed395").using("btree", table.userId.asc().nullsLast().op("uuid_ops")),
index("idx_login_history_created_at").using(
"btree",
table.userId.asc().nullsLast().op("uuid_ops"),
table.createdAt.desc().nullsFirst().op("timestamp_ops"),
),
index("idx_login_history_ip_address").using(
"btree",
table.ipAddress.asc().nullsLast().op("inet_ops"),
),
foreignKey({
columns: [table.userId],
foreignColumns: [users.id],
name: "login_history_user_id_fkey",
foreignColumns: [user.id],
name: "fk_37976e36a76ed395",
}).onDelete("cascade"),
],
);
// One-shot tokens (confirm account, password reset, …) tied to a user.
export const verificationToken = pgTable(
  "verification_token",
  {
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
    id: uuid().primaryKey().notNull(),
    purpose: varchar({ length: 255 }).notNull(),
    token: varchar({ length: 60 }).default(sql`NULL`),
    userId: uuid("user_id").notNull(),
  },
  (table) => [
    index("idx_c1cc006ba76ed395").using("btree", table.userId.asc().nullsLast().op("uuid_ops")),
    index("idx_verif_token_created_at").using(
      "btree",
      table.createdAt.desc().nullsFirst().op("timestamp_ops"),
    ),
    // Partial unique index: at most one live (non-null) token per
    // user+purpose. user_id is a uuid column, so it needs uuid_ops —
    // text_ops would be rejected by Postgres at index creation.
    uniqueIndex("unq_verif_user_purpose_token")
      .using(
        "btree",
        table.userId.asc().nullsLast().op("uuid_ops"),
        table.purpose.asc().nullsLast().op("text_ops"),
      )
      .where(sql`(token IS NOT NULL)`),
    foreignKey({
      columns: [table.userId],
      foreignColumns: [user.id],
      name: "fk_c1cc006ba76ed395",
    }).onDelete("cascade"),
  ],
);
// Join table: which users follow which sources.
export const followedSource = pgTable(
  "followed_source",
  {
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
    followerId: uuid("follower_id").notNull(),
    id: uuid().primaryKey().notNull(),
    sourceId: uuid("source_id").notNull(),
  },
  (table) => [
    index("idx_7a763a3e953c1c61").using("btree", table.sourceId.asc().nullsLast().op("uuid_ops")),
    index("idx_7a763a3eac24f853").using("btree", table.followerId.asc().nullsLast().op("uuid_ops")),
    // Composite keyset index for "sources a user follows, newest first".
    // Operator classes must match the column types — uuid_ops for the uuid
    // follower_id and timestamp_ops for created_at; they were swapped,
    // which Postgres rejects at index creation time.
    index("idx_followed_source_follower_created").using(
      "btree",
      table.followerId.asc().nullsLast().op("uuid_ops"),
      table.createdAt.desc().nullsFirst().op("timestamp_ops"),
    ),
    foreignKey({
      columns: [table.followerId],
      foreignColumns: [user.id],
      name: "fk_7a763a3eac24f853",
    }).onDelete("cascade"),
    foreignKey({
      columns: [table.sourceId],
      foreignColumns: [source.id],
      name: "fk_7a763a3e953c1c61",
    }).onDelete("cascade"),
  ],
);
// User comments attached to articles.
export const comment = pgTable(
  "comment",
  {
    articleId: uuid("article_id").notNull(),
    content: varchar({ length: 512 }).notNull(),
    createdAt: timestamp("created_at", { mode: "string" }).notNull(),
    id: uuid().primaryKey().notNull(),
    isSpam: boolean("is_spam").default(false).notNull(),
    sentiment: varchar({ length: 30 }).default("neutral").notNull(),
    userId: uuid("user_id").notNull(),
  },
  (table) => [
    index("idx_9474526c7294869c").using("btree", table.articleId.asc().nullsLast().op("uuid_ops")),
    index("idx_9474526ca76ed395").using("btree", table.userId.asc().nullsLast().op("uuid_ops")),
    // Composite keyset index for "comments on an article, newest first".
    // Operator classes must match the column types — uuid_ops for the uuid
    // article_id and timestamp_ops for created_at; they were swapped,
    // which Postgres rejects at index creation time.
    index("idx_comment_article_created").using(
      "btree",
      table.articleId.asc().nullsLast().op("uuid_ops"),
      table.createdAt.desc().nullsFirst().op("timestamp_ops"),
    ),
    foreignKey({
      columns: [table.userId],
      foreignColumns: [user.id],
      name: "fk_9474526ca76ed395",
    }).onDelete("cascade"),
    foreignKey({
      columns: [table.articleId],
      foreignColumns: [article.id],
      name: "fk_9474526c7294869c",
    }).onDelete("cascade"),
  ],
);
@@ -335,120 +217,223 @@ export const loginHistories = pgTable(
export const refreshTokens = pgTable(
"refresh_tokens",
{
id: integer("id").generatedAlwaysAsIdentity({ name: "refresh_tokens_id_seq" }).primaryKey(),
id: integer().primaryKey().notNull(),
refreshToken: varchar("refresh_token", { length: 128 }).notNull(),
username: varchar("username", { length: 255 }).notNull(),
validUntil: timestamp("valid", { mode: "string" }).notNull(),
},
(table) => [unique("uniq_refresh_token_token").on(table.refreshToken)],
);
export const verificationTokens = pgTable(
"verification_token",
{
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
id: uuid("id").notNull().defaultRandom().primaryKey(),
purpose: verificationTokenPurposeEnum("purpose").notNull(),
token: varchar("token", { length: 60 }),
userId: uuid("user_id").notNull(),
username: varchar({ length: 255 }).notNull(),
valid: timestamp({ mode: "string" }).notNull(),
},
(table) => [
index("verification_token_user_id_idx").on(table.userId),
index("idx_verification_token_created_at").using("btree", table.createdAt.desc()),
uniqueIndex("unq_verification_token_user_purpose")
.on(table.userId, table.purpose)
.where(sql`token IS NOT NULL`),
foreignKey({
columns: [table.userId],
foreignColumns: [users.id],
name: "verification_token_user_id_fkey",
}).onDelete("cascade"),
uniqueIndex("uniq_9bace7e1c74f2195").using(
"btree",
table.refreshToken.asc().nullsLast().op("text_ops"),
),
],
);
// Relations
export const article = pgTable(
"article",
{
bias: varchar({ length: 30 }).default("neutral").notNull(),
body: text().notNull(),
categories: text().array(),
crawledAt: timestamp("crawled_at", { mode: "string" }).notNull(),
excerpt: varchar({ length: 255 }).generatedAlwaysAs(sql`("left"(body, 200) || '...'::text)`),
hash: varchar({ length: 32 }).notNull(),
id: uuid().primaryKey().notNull(),
image: varchar({ length: 1024 }).generatedAlwaysAs(sql`(metadata ->> 'image'::text)`),
link: varchar({ length: 1024 }).notNull(),
metadata: jsonb(),
publishedAt: timestamp("published_at", { mode: "string" }).notNull(),
readingTime: integer("reading_time").default(1),
reliability: varchar({ length: 30 }).default("reliable").notNull(),
sentiment: varchar({ length: 30 }).default("neutral").notNull(),
sourceId: uuid("source_id").notNull(),
title: varchar({ length: 1024 }).notNull(),
tokenStatistics: jsonb("token_statistics"),
transparency: varchar({ length: 30 }).default("medium").notNull(),
tsv: tsvector("tsv").generatedAlwaysAs(
sql`(setweight(to_tsvector('french'::regconfig, (COALESCE(title, ''::character varying))::text), 'A'::"char") || setweight(to_tsvector('french'::regconfig, COALESCE(body, ''::text)), 'B'::"char"))`,
),
updatedAt: timestamp("updated_at", { mode: "string" }).default(sql`NULL`),
},
(table) => [
index("gin_article_categories").using(
"gin",
table.categories.asc().nullsLast().op("array_ops"),
),
index("gin_article_link_trgm").using("gin", table.link.asc().nullsLast().op("gin_trgm_ops")),
index("gin_article_title_trgm").using("gin", table.title.asc().nullsLast().op("gin_trgm_ops")),
index("gin_article_tsv").using("gin", table.tsv.asc().nullsLast().op("tsvector_ops")),
index("idx_23a0e66953c1c61").using("btree", table.sourceId.asc().nullsLast().op("uuid_ops")),
index("idx_article_published_at").using(
"btree",
table.publishedAt.desc().nullsFirst().op("timestamp_ops"),
),
index("idx_article_published_id").using(
"btree",
table.publishedAt.desc().nullsFirst().op("timestamp_ops"),
table.id.desc().nullsFirst().op("uuid_ops"),
),
uniqueIndex("unq_article_hash").using("btree", table.hash.asc().nullsLast().op("text_ops")),
foreignKey({
columns: [table.sourceId],
foreignColumns: [source.id],
name: "fk_23a0e66953c1c61",
}).onDelete("cascade"),
check("chk_article_reading_time", sql`reading_time >= 0`),
check(
"chk_article_sentiment",
sql`(sentiment)::text = ANY ((ARRAY['positive'::character varying, 'neutral'::character varying, 'negative'::character varying])::text[])`,
),
check(
"chk_article_metadata_json",
sql`(metadata IS NULL) OR (jsonb_typeof(metadata) = ANY (ARRAY['object'::text, 'array'::text]))`,
),
],
);
export const sourcesRelations = relations(sources, ({ many }) => ({
articles: many(articles),
followers: many(followedSources),
}));
export const user = pgTable(
"user",
{
createdAt: timestamp("created_at", { mode: "string" }).notNull(),
email: varchar({ length: 255 }).notNull(),
id: uuid().primaryKey().notNull(),
isConfirmed: boolean("is_confirmed").default(false).notNull(),
isLocked: boolean("is_locked").default(false).notNull(),
name: varchar({ length: 255 }).notNull(),
password: varchar({ length: 512 }).notNull(),
roles: jsonb().notNull(),
updatedAt: timestamp("updated_at", { mode: "string" }).default(sql`NULL`),
},
(_table) => [
uniqueIndex("unq_user_email").using("btree", sql`lower((email)::text)`),
check("chk_user_roles_json", sql`jsonb_typeof(roles) = 'array'::text`),
],
);
export const articlesRelations = relations(articles, ({ one, many }) => ({
bookmarkLinks: many(bookmarkArticles),
comments: many(comments),
source: one(sources, {
fields: [articles.sourceId],
references: [sources.id],
export const source = pgTable(
"source",
{
bias: varchar({ length: 30 }).default("neutral").notNull(),
description: varchar({ length: 1024 }).default(sql`NULL`),
displayName: varchar("display_name", { length: 255 }).default(sql`NULL`),
id: uuid().primaryKey().notNull(),
name: varchar({ length: 255 }).notNull(),
reliability: varchar({ length: 30 }).default("reliable").notNull(),
transparency: varchar({ length: 30 }).default("medium").notNull(),
updatedAt: timestamp("updated_at", { mode: "string" }).default(sql`NULL`),
url: varchar({ length: 255 }).notNull(),
},
(_table) => [
uniqueIndex("unq_source_name").using("btree", sql`lower((name)::text)`),
uniqueIndex("unq_source_url").using("btree", sql`lower((url)::text)`),
],
);
export const bookmarkArticle = pgTable(
"bookmark_article",
{
articleId: uuid("article_id").notNull(),
bookmarkId: uuid("bookmark_id").notNull(),
},
(table) => [
index("idx_6fe2655d7294869c").using("btree", table.articleId.asc().nullsLast().op("uuid_ops")),
index("idx_6fe2655d92741d25").using("btree", table.bookmarkId.asc().nullsLast().op("uuid_ops")),
foreignKey({
columns: [table.bookmarkId],
foreignColumns: [bookmark.id],
name: "fk_6fe2655d92741d25",
}).onDelete("cascade"),
foreignKey({
columns: [table.articleId],
foreignColumns: [article.id],
name: "fk_6fe2655d7294869c",
}).onDelete("cascade"),
primaryKey({ columns: [table.bookmarkId, table.articleId], name: "bookmark_article_pkey" }),
],
);
export const bookmarkRelations = relations(bookmark, ({ one, many }) => ({
bookmarkArticles: many(bookmarkArticle),
user: one(user, {
fields: [bookmark.userId],
references: [user.id],
}),
}));
export const appUsersRelations = relations(users, ({ many }) => ({
bookmarks: many(bookmarks),
comments: many(comments),
followedSources: many(followedSources),
loginAttempts: many(loginAttempts),
loginHistories: many(loginHistories),
verificationTokens: many(verificationTokens),
export const userRelations = relations(user, ({ many }) => ({
bookmarks: many(bookmark),
comments: many(comment),
followedSources: many(followedSource),
loginAttempts: many(loginAttempt),
loginHistories: many(loginHistory),
verificationTokens: many(verificationToken),
}));
export const bookmarksRelations = relations(bookmarks, ({ one, many }) => ({
articles: many(bookmarkArticles),
user: one(users, {
fields: [bookmarks.userId],
references: [users.id],
export const loginAttemptRelations = relations(loginAttempt, ({ one }) => ({
user: one(user, {
fields: [loginAttempt.userId],
references: [user.id],
}),
}));
export const bookmarkArticlesRelations = relations(bookmarkArticles, ({ one }) => ({
article: one(articles, {
fields: [bookmarkArticles.articleId],
references: [articles.id],
}),
bookmark: one(bookmarks, {
fields: [bookmarkArticles.bookmarkId],
references: [bookmarks.id],
export const loginHistoryRelations = relations(loginHistory, ({ one }) => ({
user: one(user, {
fields: [loginHistory.userId],
references: [user.id],
}),
}));
export const commentsRelations = relations(comments, ({ one }) => ({
article: one(articles, {
fields: [comments.articleId],
references: [articles.id],
}),
user: one(users, {
fields: [comments.userId],
references: [users.id],
export const verificationTokenRelations = relations(verificationToken, ({ one }) => ({
user: one(user, {
fields: [verificationToken.userId],
references: [user.id],
}),
}));
export const followedSourcesRelations = relations(followedSources, ({ one }) => ({
follower: one(users, {
fields: [followedSources.followerId],
references: [users.id],
export const followedSourceRelations = relations(followedSource, ({ one }) => ({
source: one(source, {
fields: [followedSource.sourceId],
references: [source.id],
}),
source: one(sources, {
fields: [followedSources.sourceId],
references: [sources.id],
user: one(user, {
fields: [followedSource.followerId],
references: [user.id],
}),
}));
export const loginAttemptsRelations = relations(loginAttempts, ({ one }) => ({
user: one(users, {
fields: [loginAttempts.userId],
references: [users.id],
export const sourceRelations = relations(source, ({ many }) => ({
articles: many(article),
followedSources: many(followedSource),
}));
export const commentRelations = relations(comment, ({ one }) => ({
article: one(article, {
fields: [comment.articleId],
references: [article.id],
}),
user: one(user, {
fields: [comment.userId],
references: [user.id],
}),
}));
export const loginHistoriesRelations = relations(loginHistories, ({ one }) => ({
user: one(users, {
fields: [loginHistories.userId],
references: [users.id],
/** An article has many bookmark links, many comments, and one owning source. */
export const articleRelations = relations(article, ({ one, many }) => {
  return {
    bookmarkArticles: many(bookmarkArticle),
    comments: many(comment),
    source: one(source, {
      fields: [article.sourceId],
      references: [source.id],
    }),
  };
});
export const verificationTokensRelations = relations(verificationTokens, ({ one }) => ({
user: one(users, {
fields: [verificationTokens.userId],
references: [users.id],
/** Join-table row: one article linked into one bookmark collection. */
export const bookmarkArticleRelations = relations(bookmarkArticle, ({ one }) => {
  return {
    article: one(article, {
      fields: [bookmarkArticle.articleId],
      references: [article.id],
    }),
    bookmark: one(bookmark, {
      fields: [bookmarkArticle.bookmarkId],
      references: [bookmark.id],
    }),
  };
});
+12 -35
View File
@@ -1,5 +1,11 @@
import { Buffer } from "node:buffer";
import {
PAGINATION_DEFAULT_LIMIT,
PAGINATION_DEFAULT_PAGE,
PAGINATION_MAX_LIMIT,
} from "@/constants";
export type SortDirection = "asc" | "desc";
export interface PageRequest {
@@ -27,27 +33,23 @@ export interface PaginationMeta {
hasNext: boolean;
}
const DEFAULT_PAGE = 1;
const DEFAULT_LIMIT = 5;
const MAX_LIMIT = 100;
export function createPageState(request: PageRequest = {}): PageState {
const page =
Number.isFinite(request.page) && (request.page ?? 0) > 0
? Math.trunc(request.page!)
: DEFAULT_PAGE;
: PAGINATION_DEFAULT_PAGE;
let limit =
Number.isFinite(request.limit) && (request.limit ?? 0) > 0
? Math.trunc(request.limit!)
: DEFAULT_LIMIT;
: PAGINATION_DEFAULT_LIMIT;
if (limit < DEFAULT_LIMIT) {
limit = DEFAULT_LIMIT;
if (limit < PAGINATION_DEFAULT_LIMIT) {
limit = PAGINATION_DEFAULT_LIMIT;
}
if (limit > MAX_LIMIT) {
limit = MAX_LIMIT;
if (limit > PAGINATION_MAX_LIMIT) {
limit = PAGINATION_MAX_LIMIT;
}
const cursor = request.cursor ?? null;
@@ -92,28 +94,3 @@ export function decodeCursor(cursor?: string | null): CursorPayload | null {
return null;
}
}
/**
 * Shapes a keyset-paginated query result: trims the probe row and assembles
 * the pagination metadata (current page, cursor, hasNext, limit).
 *
 * Callers are expected to fetch `limit + 1` rows — the extra row only signals
 * that another page exists and is dropped from the returned data.
 */
export function buildPaginationResult<T extends Record<string, unknown>>(
  rows: T[],
  page: PageState,
  keyset: { id: string; date?: string | null },
): { data: T[]; pagination: PaginationMeta } {
  const hasNext = rows.length > page.limit;
  const data = hasNext ? rows.slice(0, page.limit) : rows;
  // Cursor points at the last returned row; null when the page is empty.
  const cursor =
    data.length === 0 ? null : encodeCursor(data[data.length - 1], keyset);
  return {
    data,
    pagination: {
      current: page.page,
      cursor,
      hasNext,
      limit: page.limit,
    },
  };
}
+9
View File
@@ -0,0 +1,9 @@
{
"main": "src/index.ts",
"name": "@basango/encryption",
"private": true,
"scripts": {
"clean": "rm -rf .turbo node_modules",
"typecheck": "tsc --noEmit"
}
}
+76
View File
@@ -0,0 +1,76 @@
import crypto from "node:crypto";
import { createEnvAccessor } from "@devscast/config";
// Accessor for the encryption key environment variable; throws if it is unset.
export const env = createEnvAccessor(["BASANGO_ENCRYPTION_KEY"] as const);
// AES-256-GCM parameters. Payload layout is [iv | authTag | ciphertext], base64-encoded.
const ALGORITHM = "aes-256-gcm";
// NOTE(review): 12 bytes is the usual GCM IV recommendation; kept at 16 for payload compatibility.
const IV_LENGTH = 16;
const AUTH_TAG_LENGTH = 16;
/**
 * Derives the 32-byte AES-256 key from BASANGO_ENCRYPTION_KEY.
 *
 * The variable must hold 64 hex characters (32 bytes). Parses the hex string
 * once instead of twice as before.
 * @throws Error when the decoded key is not exactly 32 bytes.
 */
function getKey(): Buffer {
  const keyBuffer = Buffer.from(env("BASANGO_ENCRYPTION_KEY"), "hex");
  // Buffer.from(..., "hex") silently stops at the first invalid character,
  // so a malformed (non-hex) key also fails this length check.
  if (keyBuffer.length !== 32) {
    throw new Error("BASANGO_ENCRYPTION_KEY must be a 64-character hex string (32 bytes).");
  }
  return keyBuffer;
}
/**
* Encrypts a plaintext string using AES-256-GCM.
* @param text The plaintext string to encrypt.
* @returns A string containing the IV, auth tag, and encrypted text, concatenated and base64 encoded.
*/
/**
 * Encrypts a plaintext string using AES-256-GCM.
 * @param text The plaintext string to encrypt.
 * @returns Base64 payload laid out as [iv | authTag | ciphertext].
 */
export function encrypt(text: string): string {
  const iv = crypto.randomBytes(IV_LENGTH);
  const cipher = crypto.createCipheriv(ALGORITHM, getKey(), iv);
  const ciphertext = Buffer.concat([cipher.update(text, "utf8"), cipher.final()]);
  // Prepend IV and auth tag so decrypt() can recover them from the payload alone.
  return Buffer.concat([iv, cipher.getAuthTag(), ciphertext]).toString("base64");
}
/**
* Decrypts an AES-256-GCM encrypted string.
* @param encryptedPayload The base64 encoded string containing the IV, auth tag, and encrypted text.
* @returns The original plaintext string.
*/
/**
 * Decrypts a payload produced by encrypt() (AES-256-GCM).
 * @param encryptedPayload Base64 string laid out as [iv | authTag | ciphertext].
 * @returns The original plaintext string.
 */
export function decrypt(encryptedPayload: string): string {
  const payload = Buffer.from(encryptedPayload, "base64");
  // Split the payload back into its three fixed-position parts.
  const iv = payload.subarray(0, IV_LENGTH);
  const authTag = payload.subarray(IV_LENGTH, IV_LENGTH + AUTH_TAG_LENGTH);
  const ciphertext = payload.subarray(IV_LENGTH + AUTH_TAG_LENGTH);
  const decipher = crypto.createDecipheriv(ALGORITHM, getKey(), iv);
  decipher.setAuthTag(authTag);
  return Buffer.concat([decipher.update(ciphertext), decipher.final()]).toString("utf8");
}
/** SHA-256 digest of a string, hex-encoded (64 characters). */
export function hash(str: string): string {
  const digest = crypto.createHash("sha256");
  digest.update(str);
  return digest.digest("hex");
}
/** MD5 digest of a string, hex-encoded (32 characters). */
export function md5(str: string): string {
  const hasher = crypto.createHash("md5");
  return hasher.update(str).digest("hex");
}
/** Cryptographically random hex string: `size` bytes → 2 * size hex characters. */
export function generateRandomBytes(size: number): string {
  const bytes = crypto.randomBytes(size);
  return bytes.toString("hex");
}
+5
View File
@@ -0,0 +1,5 @@
{
"exclude": ["node_modules"],
"extends": "@basango/tsconfig/base.json",
"include": ["src/**/*"]
}
-3
View File
@@ -3,9 +3,6 @@
"pino": "^10.1.0",
"pino-pretty": "^13.1.2"
},
"devDependencies": {
"typescript": "catalog:"
},
"main": "src/index.ts",
"name": "@basango/logger",
"private": true,
+5 -2
View File
@@ -1,9 +1,12 @@
import { createEnvAccessor } from "@devscast/config";
import pino from "pino";
const env = createEnvAccessor(["LOG_LEVEL", "NODE_ENV"] as const);
export const logger = pino({
level: process.env.LOG_LEVEL || "debug",
level: env("LOG_LEVEL", { default: "info" }),
// Use pretty printing in development, structured JSON in production
...(process.env.NODE_ENV !== "production" && {
...(env("NODE_ENV") !== "production" && {
transport: {
options: {
colorize: true,
+1 -3
View File
@@ -15,11 +15,9 @@
"@basango/tsconfig": "workspace:*",
"@tailwindcss/postcss": "^4.1.11",
"@turbo/gen": "^2.5.5",
"@types/node": "catalog:",
"@types/react": "catalog:",
"@types/react-dom": "catalog:",
"tailwindcss": "^4.1.11",
"typescript": "catalog:"
"tailwindcss": "^4.1.11"
},
"exports": {
"./components/*": "./src/components/*.tsx",