feat(domain): centralize data definition
This commit is contained in:
@@ -1,26 +1,45 @@
|
||||
import { DEFAULT_TIMEZONE } from "@basango/domain/constants";
|
||||
import {
|
||||
Distribution,
|
||||
Distributions,
|
||||
ID,
|
||||
PaginationState,
|
||||
Publication,
|
||||
Publications,
|
||||
Sentiment,
|
||||
} from "@basango/domain/models";
|
||||
import { md5 } from "@basango/encryption";
|
||||
import { count, eq } from "drizzle-orm";
|
||||
import type { SQL } from "drizzle-orm";
|
||||
import { count, desc, eq, getTableColumns, sql } from "drizzle-orm";
|
||||
import { v7 as uuidV7 } from "uuid";
|
||||
|
||||
import { Database } from "#db/client";
|
||||
import { getSourceIdByName } from "#db/queries/sources";
|
||||
import { ArticleMetadata, Sentiment, TokenStatistics, articles } from "#db/schema";
|
||||
import { computeReadingTime, computeTokenStatistics } from "#db/utils/computed";
|
||||
|
||||
/**
 * Input payload for creating an article row.
 *
 * `link` is hashed (md5) elsewhere in this module to detect duplicates,
 * so it should be the article's canonical URL.
 */
export type CreateArticleParams = {
  /** Article headline. */
  title: string;
  /** Full article text. */
  body: string;
  /** Category labels attached to the article. */
  categories: string[];
  /** Canonical URL of the article; used for duplicate detection via md5. */
  link: string;
  /** Id of the source (feed/outlet) the article came from. */
  sourceId: string;
  /** Original publication timestamp. */
  publishedAt: Date;
  /** Optional precomputed sentiment; presumably derived later when absent — TODO confirm. */
  sentiment?: Sentiment;
  /** Optional precomputed token statistics; computeTokenStatistics is imported for this — TODO confirm it fills the gap. */
  tokenStatistics?: TokenStatistics;
  /** Optional precomputed reading time; computeReadingTime is imported for this — TODO confirm it fills the gap. */
  readingTime?: number;
  /** Optional free-form article metadata. */
  metadata?: ArticleMetadata;
};
|
||||
import { articles, sources } from "#db/schema";
|
||||
import { CreateArticleParams, GetArticlesParams } from "#db/types/articles";
|
||||
import { GetDistributionsParams, GetPublicationsParams } from "#db/types/shared";
|
||||
import {
|
||||
applyFilters,
|
||||
buildDateRange,
|
||||
buildKeysetFilter,
|
||||
buildPaginatedResult,
|
||||
buildPaginationState,
|
||||
buildPreviousRange,
|
||||
buildSearchQuery,
|
||||
computeDelta,
|
||||
computeReadingTime,
|
||||
computeTokenStatistics,
|
||||
} from "#db/utils";
|
||||
|
||||
export async function createArticle(db: Database, params: CreateArticleParams) {
|
||||
const duplicated = await getArticleByHash(db, md5(params.link));
|
||||
if (duplicated !== undefined) {
|
||||
return {
|
||||
id: duplicated.id,
|
||||
sourceId: duplicated.sourceId,
|
||||
};
|
||||
}
|
||||
|
||||
const data = {
|
||||
...params,
|
||||
hash: md5(params.link),
|
||||
@@ -34,14 +53,6 @@ export async function createArticle(db: Database, params: CreateArticleParams) {
|
||||
}),
|
||||
};
|
||||
|
||||
const duplicated = await getArticleByHash(db, data.hash);
|
||||
if (duplicated !== undefined) {
|
||||
return {
|
||||
id: duplicated.id,
|
||||
sourceId: duplicated.sourceId,
|
||||
};
|
||||
}
|
||||
|
||||
const [result] = await db
|
||||
.insert(articles)
|
||||
.values({ id: uuidV7(), ...data })
|
||||
@@ -63,7 +74,13 @@ export async function getArticleByHash(db: Database, hash: string) {
|
||||
});
|
||||
}
|
||||
|
||||
export async function countArticlesBySourceId(db: Database, sourceId: string) {
|
||||
export async function getArticleById(db: Database, id: ID) {
|
||||
return await db.query.articles.findFirst({
|
||||
where: eq(articles.id, id),
|
||||
});
|
||||
}
|
||||
|
||||
export async function countArticlesBySourceId(db: Database, sourceId: ID) {
|
||||
const result = await db
|
||||
.select({ count: count(articles.id) })
|
||||
.from(articles)
|
||||
@@ -72,3 +89,148 @@ export async function countArticlesBySourceId(db: Database, sourceId: string) {
|
||||
|
||||
return result?.count ?? 0;
|
||||
}
|
||||
|
||||
function buildFilters(params: GetArticlesParams, pagination: PaginationState) {
|
||||
const filters: SQL<unknown>[] = [];
|
||||
|
||||
if (params.sourceId) {
|
||||
filters.push(eq(articles.sourceId, params.sourceId));
|
||||
}
|
||||
|
||||
if (params.sentiment) {
|
||||
filters.push(eq(articles.sentiment, params.sentiment as Sentiment));
|
||||
}
|
||||
|
||||
if (params.category) {
|
||||
filters.push(sql`${params.category} = ANY(${articles.categories})`);
|
||||
}
|
||||
|
||||
if (params.search?.trim()) {
|
||||
const query = buildSearchQuery(params.search);
|
||||
if (query) {
|
||||
filters.push(sql`${articles.tsv} @@ to_tsquery('french', ${query})`);
|
||||
}
|
||||
}
|
||||
|
||||
const cursorFilter = buildKeysetFilter({
|
||||
cursor: pagination.payload,
|
||||
date: articles.publishedAt,
|
||||
id: articles.id,
|
||||
});
|
||||
|
||||
if (cursorFilter !== undefined) {
|
||||
filters.push(cursorFilter);
|
||||
}
|
||||
|
||||
return filters;
|
||||
}
|
||||
|
||||
export async function getArticles(db: Database, params: GetArticlesParams) {
|
||||
const pagination = buildPaginationState(params);
|
||||
const filters = buildFilters(params, pagination);
|
||||
|
||||
const query = db
|
||||
.select({
|
||||
...getTableColumns(articles),
|
||||
source: {
|
||||
...getTableColumns(sources),
|
||||
},
|
||||
})
|
||||
.from(articles)
|
||||
.innerJoin(sources, eq(articles.sourceId, sources.id));
|
||||
|
||||
const rows = await applyFilters(query, filters)
|
||||
.orderBy(desc(articles.publishedAt), desc(articles.id))
|
||||
.limit(pagination.limit + 1);
|
||||
|
||||
return buildPaginatedResult(rows, pagination, {
|
||||
date: "publishedAt",
|
||||
id: "id",
|
||||
});
|
||||
}
|
||||
|
||||
export async function getArticlesPublicationGraph(
|
||||
db: Database,
|
||||
params: GetPublicationsParams,
|
||||
): Promise<Publications> {
|
||||
const [startDate, endDate] = buildDateRange(params.range);
|
||||
const [previousRangeStart, previousRangeEnd] = buildPreviousRange([startDate, endDate]);
|
||||
|
||||
const data = await db.execute<Publication>(sql`
|
||||
WITH bounds AS (
|
||||
SELECT
|
||||
${startDate}::timestamptz AS start_ts,
|
||||
${endDate}::timestamptz AS end_ts
|
||||
),
|
||||
series AS (
|
||||
SELECT (gs)::date AS d
|
||||
FROM bounds b,
|
||||
LATERAL generate_series(
|
||||
date_trunc('day', timezone(${DEFAULT_TIMEZONE}, b.start_ts)),
|
||||
date_trunc('day', timezone(${DEFAULT_TIMEZONE}, b.end_ts)),
|
||||
INTERVAL '1 day'
|
||||
) AS gs
|
||||
),
|
||||
counts AS (
|
||||
SELECT
|
||||
a.published_at::date AS d,
|
||||
COUNT(*)::int AS c
|
||||
FROM article a, bounds b
|
||||
WHERE a.published_at >= timezone(${DEFAULT_TIMEZONE}, b.start_ts)
|
||||
AND a.published_at <= timezone(${DEFAULT_TIMEZONE}, b.end_ts)
|
||||
GROUP BY 1
|
||||
)
|
||||
SELECT
|
||||
to_char(s.d, 'YYYY-MM-DD') AS date,
|
||||
COALESCE(c.c, 0) AS count
|
||||
FROM series s
|
||||
LEFT JOIN counts c USING (d)
|
||||
ORDER BY s.d ASC
|
||||
`);
|
||||
|
||||
const [previous] = await db
|
||||
.execute<{ count: number }>(
|
||||
sql`
|
||||
SELECT COALESCE(COUNT(*)::int, 0) AS count
|
||||
FROM article a
|
||||
WHERE a.published_at >= timezone(${DEFAULT_TIMEZONE}, ${previousRangeStart})
|
||||
AND a.published_at <= timezone(${DEFAULT_TIMEZONE}, ${previousRangeEnd})
|
||||
`,
|
||||
)
|
||||
.then((res) => res.rows);
|
||||
|
||||
const currentTotal = data.rows.reduce((acc, item) => acc + item.count, 0);
|
||||
const previousTotal = previous?.count ?? 0;
|
||||
|
||||
return {
|
||||
items: data.rows,
|
||||
meta: {
|
||||
current: currentTotal,
|
||||
delta: computeDelta(currentTotal, previousTotal),
|
||||
previous: previousTotal,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export async function getArticlesSourceDistribution(
|
||||
db: Database,
|
||||
params: GetDistributionsParams,
|
||||
): Promise<Distributions> {
|
||||
const data = await db.execute<Distribution>(sql`
|
||||
SELECT
|
||||
${sources.id}::text AS id,
|
||||
${sources.name} AS name,
|
||||
COUNT(${articles.id})::int AS count,
|
||||
ROUND((COUNT(*)::numeric / SUM(COUNT(*)) OVER ()) * 100, 2)::float AS percentage
|
||||
FROM ${articles}
|
||||
JOIN ${sources} ON ${sources.id} = ${articles.sourceId}
|
||||
GROUP BY ${sources.id}, ${sources.name}
|
||||
ORDER BY count DESC
|
||||
LIMIT ${params.limit ?? 10}
|
||||
`);
|
||||
|
||||
return {
|
||||
items: data.rows,
|
||||
total: data.rows.reduce((acc, item) => acc + item.count, 0),
|
||||
};
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user