Initial commit

This commit is contained in:
OpenCode Test
2025-12-24 10:50:10 -08:00
commit e1a64aa092
70 changed files with 5827 additions and 0 deletions

View File

@@ -0,0 +1,94 @@
import { z } from "zod";
import { getDb } from "@tline/db";
import { presignGetObjectUrl } from "@tline/minio";
// Run this route on the Node.js runtime (not Edge).
export const runtime = "nodejs";
// Path params: the asset id must be a UUID string.
const paramsSchema = z.object({
id: z.string().uuid()
});
// Accepted values for the ?variant= query parameter; "original" refers to the source object.
const variantSchema = z.enum(["original", "thumb_small", "thumb_med", "poster"]);
/**
 * GET handler: resolve an asset variant to a short-lived presigned GET URL.
 *
 * Path params: { id } — asset UUID.
 * Query: ?variant=original|thumb_small|thumb_med|poster (defaults to "original").
 *
 * Responses:
 *   400 — invalid id or variant value
 *   404 — asset unknown, or the requested variant has no stored object key
 *   200 — payload from presignGetObjectUrl, served with Cache-Control: no-store
 */
export async function GET(
  request: Request,
  context: { params: Promise<{ id: string }> }
): Promise<Response> {
  const paramsParsed = paramsSchema.safeParse(await context.params);
  if (!paramsParsed.success) {
    return Response.json(
      { error: "invalid_params", issues: paramsParsed.error.issues },
      { status: 400 },
    );
  }
  const { id } = paramsParsed.data;

  const requestUrl = new URL(request.url);
  const variantParsed = variantSchema.safeParse(
    requestUrl.searchParams.get("variant") ?? "original",
  );
  if (!variantParsed.success) {
    return Response.json(
      { error: "invalid_query", issues: variantParsed.error.issues },
      { status: 400 },
    );
  }
  const variant = variantParsed.data;

  const db = getDb();
  const rows = await db<
    {
      bucket: string;
      active_key: string;
      thumb_small_key: string | null;
      thumb_med_key: string | null;
      poster_key: string | null;
      mime_type: string;
    }[]
  >`
    select bucket, active_key, thumb_small_key, thumb_med_key, poster_key, mime_type
    from assets
    where id = ${id}
    limit 1
  `;
  const asset = rows[0];
  if (!asset) {
    return Response.json({ error: "not_found" }, { status: 404 });
  }

  // Map each variant to its stored object key; derived variants may be absent.
  const keyByVariant: Record<typeof variant, string | null> = {
    original: asset.active_key,
    thumb_small: asset.thumb_small_key,
    thumb_med: asset.thumb_med_key,
    poster: asset.poster_key,
  };
  const key = keyByVariant[variant];
  if (!key) {
    return Response.json(
      { error: "variant_not_available", variant },
      { status: 404 }
    );
  }

  // Hint the browser; especially helpful for Range playback.
  const responseContentType = variant === "original" ? asset.mime_type : "image/jpeg";
  const isVideoOriginal =
    variant === "original" && asset.mime_type.startsWith("video/");
  const signed = await presignGetObjectUrl({
    bucket: asset.bucket,
    key,
    responseContentType,
    responseContentDisposition: isVideoOriginal ? "inline" : undefined,
  });

  return Response.json(signed, {
    headers: {
      "Cache-Control": "no-store"
    }
  });
}

View File

@@ -0,0 +1,114 @@
import { z } from "zod";
import { getDb } from "@tline/db";
// Run this route on the Node.js runtime (not Edge).
export const runtime = "nodejs";
// Query-string schema; .strict() rejects unknown keys.
// `limit` is coerced from its string form and capped at 200 (default 60).
// `cursor` + `cursorTs` are the keyset-pagination cursor (asset id, capture timestamp).
const querySchema = z
.object({
// ISO-8601 datetime bounds; the SQL applies start inclusively and end exclusively.
start: z.string().datetime().optional(),
end: z.string().datetime().optional(),
mediaType: z.enum(["image", "video"]).optional(),
status: z.enum(["new", "processing", "ready", "failed"]).optional(),
limit: z.coerce.number().int().positive().max(200).default(60),
cursor: z.string().uuid().optional(),
cursorTs: z.string().datetime().optional(),
})
.strict();
/**
 * GET handler: list assets ordered by (capture_ts_utc, id) with keyset pagination.
 *
 * Query: start/end (ISO datetimes; start inclusive, end exclusive), mediaType,
 * status, limit (<= 200, default 60), and cursor + cursorTs — both required
 * together, taken from a previous response's `next` object.
 *
 * Responses:
 *   400 — invalid query string, or a lone cursor/cursorTs
 *   200 — { start, end, items, next }; `next` is null on the final page.
 */
export async function GET(request: Request): Promise<Response> {
  const url = new URL(request.url);
  const parsed = querySchema.safeParse({
    start: url.searchParams.get("start") ?? undefined,
    end: url.searchParams.get("end") ?? undefined,
    mediaType: url.searchParams.get("mediaType") ?? undefined,
    status: url.searchParams.get("status") ?? undefined,
    limit: url.searchParams.get("limit") ?? undefined,
    cursor: url.searchParams.get("cursor") ?? undefined,
    cursorTs: url.searchParams.get("cursorTs") ?? undefined,
  });
  if (!parsed.success) {
    return Response.json(
      { error: "invalid_query", issues: parsed.error.issues },
      { status: 400 },
    );
  }
  const query = parsed.data;

  // Fix: cursor and cursorTs are only meaningful as a pair. Previously a lone
  // value was silently ignored by the SQL and pagination restarted from the
  // beginning; report the client error instead.
  if ((query.cursor === undefined) !== (query.cursorTs === undefined)) {
    return Response.json(
      { error: "invalid_query", message: "cursor and cursorTs must be provided together" },
      { status: 400 },
    );
  }

  const start = query.start ? new Date(query.start) : null;
  const end = query.end ? new Date(query.end) : null;
  const db = getDb();
  // Cursor pagination: (capture_ts_utc, id) > (cursorTs, cursor)
  const cursorTs = query.cursorTs ? new Date(query.cursorTs) : null;
  const cursorId = query.cursor ?? null;
  const rows = await db<
    {
      id: string;
      bucket: string;
      media_type: "image" | "video";
      mime_type: string;
      active_key: string;
      capture_ts_utc: string | null;
      date_confidence: string | null;
      width: number | null;
      height: number | null;
      rotation: number | null;
      duration_seconds: number | null;
      thumb_small_key: string | null;
      thumb_med_key: string | null;
      poster_key: string | null;
      status: "new" | "processing" | "ready" | "failed";
      error_message: string | null;
    }[]
  >`
    select
      id,
      bucket,
      media_type,
      mime_type,
      active_key,
      capture_ts_utc,
      date_confidence,
      width,
      height,
      rotation,
      duration_seconds,
      thumb_small_key,
      thumb_med_key,
      poster_key,
      status,
      error_message
    from assets
    where true
      and capture_ts_utc is not null
      and (${start}::timestamptz is null or capture_ts_utc >= ${start}::timestamptz)
      and (${end}::timestamptz is null or capture_ts_utc < ${end}::timestamptz)
      and (${query.mediaType ?? null}::media_type is null or media_type = ${query.mediaType ?? null}::media_type)
      and (${query.status ?? null}::asset_status is null or status = ${query.status ?? null}::asset_status)
      and (
        ${cursorId}::uuid is null
        or ${cursorTs}::timestamptz is null
        or (capture_ts_utc, id) > (${cursorTs}::timestamptz, ${cursorId}::uuid)
      )
    order by capture_ts_utc asc nulls last, id asc
    limit ${query.limit}
  `;

  // Fix: only advertise a next page when this page was full. Previously a
  // cursor was returned even for the final short page, which cost every client
  // one extra request that always came back empty.
  const last = rows.length === query.limit ? rows[rows.length - 1] : null;
  return Response.json({
    start: start ? start.toISOString() : null,
    end: end ? end.toISOString() : null,
    items: rows,
    next:
      last && last.capture_ts_utc
        ? { cursor: last.id, cursorTs: last.capture_ts_utc }
        : null,
  });
}

View File

@@ -0,0 +1,3 @@
/** Liveness probe: always responds 200 with a JSON body of { ok: true }. */
export function GET(): Response {
  const payload = { ok: true };
  return Response.json(payload);
}

View File

@@ -0,0 +1,66 @@
import { z } from "zod";
import { getDb } from "@tline/db";
import { getMinioBucket } from "@tline/minio";
import { enqueueScanMinioPrefix } from "@tline/queue";
// Run this route on the Node.js runtime (not Edge).
export const runtime = "nodejs";
// Path params: the import id must be a UUID string.
const paramsSchema = z.object({ id: z.string().uuid() });
// Optional JSON body; .strict() rejects unknown keys.
// `bucket` falls back to getMinioBucket() in the handler when omitted.
const bodySchema = z
.object({
bucket: z.string().min(1).optional(),
prefix: z.string().min(1).default("originals/"),
})
.strict();
/**
 * POST handler: queue a MinIO prefix scan for an existing import and mark the
 * import row as 'queued'.
 *
 * Path params: { id } — import UUID.
 * Body (optional JSON): { bucket?, prefix? }; prefix defaults to "originals/",
 * bucket defaults to getMinioBucket().
 *
 * Responses:
 *   400 — invalid id or invalid body
 *   404 — import row does not exist
 *   200 — { ok, importId, bucket, prefix }
 */
export async function POST(
  request: Request,
  context: { params: Promise<{ id: string }> },
): Promise<Response> {
  const paramsParsed = paramsSchema.safeParse(await context.params);
  if (!paramsParsed.success) {
    return Response.json(
      { error: "invalid_params", issues: paramsParsed.error.issues },
      { status: 400 },
    );
  }
  const params = paramsParsed.data;

  // An absent/unparsable body is treated as {} so schema defaults apply.
  // Fix: a present-but-invalid body previously hit bodySchema.parse, which
  // throws a ZodError and surfaced as an unhandled 500; report 400 instead,
  // matching the validation style of the other handlers.
  const bodyJson = await request.json().catch(() => ({}));
  const bodyParsed = bodySchema.safeParse(bodyJson);
  if (!bodyParsed.success) {
    return Response.json(
      { error: "invalid_body", issues: bodyParsed.error.issues },
      { status: 400 },
    );
  }
  const body = bodyParsed.data;
  const bucket = body.bucket ?? getMinioBucket();

  const db = getDb();
  const rows = await db<{ id: string }[]>`
    select id
    from imports
    where id = ${params.id}
    limit 1
  `;
  const imp = rows[0];
  if (!imp) {
    return Response.json({ error: "not_found" }, { status: 404 });
  }

  await enqueueScanMinioPrefix({
    importId: imp.id,
    bucket,
    prefix: body.prefix,
  });
  await db`
    update imports
    set status = 'queued'
    where id = ${imp.id}
  `;
  return Response.json({ ok: true, importId: imp.id, bucket, prefix: body.prefix });
}

View File

@@ -0,0 +1,65 @@
import { z } from "zod";
import { getDb } from "@tline/db";
// Run this route on the Node.js runtime (not Edge).
export const runtime = "nodejs";
// Path params: the import id must be a UUID string.
const paramsSchema = z.object({ id: z.string().uuid() });
/**
 * GET handler: fetch one import row plus aggregate status counts for the
 * assets staged under it.
 *
 * Path params: { id } — import UUID.
 *
 * Responses:
 *   400 — invalid id
 *   404 — import row does not exist
 *   200 — import columns spread into the body, plus asset_counts (or null).
 */
export async function GET(
  _request: Request,
  context: { params: Promise<{ id: string }> },
): Promise<Response> {
  const parsedParams = paramsSchema.safeParse(await context.params);
  if (!parsedParams.success) {
    return Response.json(
      { error: "invalid_params", issues: parsedParams.error.issues },
      { status: 400 },
    );
  }
  const { id } = parsedParams.data;

  const db = getDb();
  const importRows = await db<
    {
      id: string;
      type: string;
      status: string;
      created_at: string;
      total_count: number | null;
      processed_count: number | null;
      failed_count: number | null;
    }[]
  >`
    select id, type, status, created_at, total_count, processed_count, failed_count
    from imports
    where id = ${id}
    limit 1
  `;
  const imp = importRows[0];
  if (!imp) {
    return Response.json({ error: "not_found" }, { status: 404 });
  }

  // Assets uploaded for this import live under the staging/<importId>/ prefix.
  const counts = await db<
    {
      total: number;
      ready: number;
      failed: number;
      processing: number;
      new_count: number;
    }[]
  >`
    select
      count(*)::int as total,
      count(*) filter (where status = 'ready')::int as ready,
      count(*) filter (where status = 'failed')::int as failed,
      count(*) filter (where status = 'processing')::int as processing,
      count(*) filter (where status = 'new')::int as new_count
    from assets
    where source_key like ${`staging/${imp.id}/%`}
  `;

  return Response.json({ ...imp, asset_counts: counts[0] ?? null });
}

View File

@@ -0,0 +1,108 @@
import { randomUUID } from "crypto";
import { Readable } from "stream";
import type { ReadableStream as NodeReadableStream } from "node:stream/web";
import { PutObjectCommand } from "@aws-sdk/client-s3";
import { z } from "zod";
import { getDb } from "@tline/db";
import { getMinioBucket, getMinioInternalClient } from "@tline/minio";
import { enqueueProcessAsset } from "@tline/queue";
// Run this route on the Node.js runtime (Readable.fromWeb and the AWS SDK need Node APIs).
export const runtime = "nodejs";
// Path params: the import id must be a UUID string.
const paramsSchema = z.object({ id: z.string().uuid() });
// Ordered list of content-type matchers mapping a MIME prefix to a media category.
const contentTypeMediaMap: Array<{
  match: (ct: string) => boolean;
  mediaType: "image" | "video";
}> = [
  { match: (ct) => ct.startsWith("image/"), mediaType: "image" },
  { match: (ct) => ct.startsWith("video/"), mediaType: "video" },
];

/** Classify a MIME content type as "image" or "video"; null when it is neither. */
function inferMediaTypeFromContentType(ct: string): "image" | "video" | null {
  for (const entry of contentTypeMediaMap) {
    if (entry.match(ct)) {
      return entry.mediaType;
    }
  }
  return null;
}
/**
 * Derive a safe filename extension from a MIME content type.
 *
 * Fix: Content-Type headers may carry parameters ("video/mp4; codecs=avc1",
 * "text/plain; charset=utf-8"); the previous subtype extraction folded the
 * parameter text into the extension ("mp4codecsavc1"). Parameters are now
 * stripped before parsing. The subtype is reduced to lowercase alphanumerics
 * and "bin" is the fallback for missing/empty subtypes.
 */
function inferExtFromContentType(ct: string): string {
  // Drop any media-type parameters ("; charset=...", "; codecs=...") first.
  const mediaType = ct.split(";")[0]?.trim() ?? "";
  const subtype = mediaType.split("/")[1] ?? "bin";
  return subtype.replace(/[^a-zA-Z0-9]+/g, "").toLowerCase() || "bin";
}
/**
 * POST handler: stream the raw request body into MinIO staging storage,
 * upsert the matching asset row, and queue it for processing.
 *
 * Path params: { id } — import UUID. The object is stored under
 * staging/<importId>/<uuid>.<ext>, with the extension inferred from the
 * Content-Type header.
 *
 * Responses:
 *   400 — invalid id, unsupported content type, or missing request body
 *   404 — import row does not exist
 *   500 — insert returned no row
 *   200 — { ok, importId, assetId, bucket, key }
 */
export async function POST(
  request: Request,
  context: { params: Promise<{ id: string }> },
): Promise<Response> {
  const paramsParsed = paramsSchema.safeParse(await context.params);
  if (!paramsParsed.success) {
    return Response.json(
      { error: "invalid_params", issues: paramsParsed.error.issues },
      { status: 400 },
    );
  }
  const importId = paramsParsed.data.id;

  const contentType = request.headers.get("content-type") ?? "application/octet-stream";
  const mediaType = inferMediaTypeFromContentType(contentType);
  if (mediaType === null) {
    return Response.json({ error: "unsupported_content_type", contentType }, { status: 400 });
  }

  const bucket = getMinioBucket();
  const key = `staging/${importId}/${randomUUID()}.${inferExtFromContentType(contentType)}`;

  const db = getDb();
  const [imp] = await db<{ id: string }[]>`
    select id
    from imports
    where id = ${importId}
    limit 1
  `;
  if (!imp) {
    return Response.json({ error: "import_not_found" }, { status: 404 });
  }
  if (!request.body) {
    return Response.json({ error: "missing_body" }, { status: 400 });
  }

  // Bridge the web ReadableStream onto a Node stream for the AWS SDK.
  const s3 = getMinioInternalClient();
  await s3.send(
    new PutObjectCommand({
      Bucket: bucket,
      Key: key,
      Body: Readable.fromWeb(request.body as unknown as NodeReadableStream),
      ContentType: contentType,
    }),
  );

  // Upsert on (bucket, source_key) so a re-upload refreshes the active key.
  const inserted = await db<
    {
      id: string;
      status: "new" | "processing" | "ready" | "failed";
    }[]
  >`
    insert into assets (bucket, media_type, mime_type, source_key, active_key)
    values (${bucket}, ${mediaType}, ${contentType}, ${key}, ${key})
    on conflict (bucket, source_key)
    do update set active_key = excluded.active_key
    returning id, status
  `;
  const asset = inserted[0];
  if (!asset) {
    return Response.json({ error: "asset_insert_failed" }, { status: 500 });
  }

  await enqueueProcessAsset({ assetId: asset.id });
  return Response.json({ ok: true, importId: imp.id, assetId: asset.id, bucket, key });
}

View File

@@ -0,0 +1,37 @@
import { z } from "zod";
import { getDb } from "@tline/db";
// Run this route on the Node.js runtime (not Edge).
export const runtime = "nodejs";
// Optional JSON body; .strict() rejects unknown keys. `type` defaults to "upload".
const bodySchema = z
.object({
type: z.enum(["upload", "minio_scan"]).default("upload"),
})
.strict();
/**
 * POST handler: create a new import row with status 'new'.
 *
 * Body (optional JSON): { type?: "upload" | "minio_scan" } — defaults to "upload".
 *
 * Responses:
 *   400 — invalid body
 *   500 — insert returned no row
 *   200 — the created row { id, type, status, created_at }
 */
export async function POST(request: Request): Promise<Response> {
  // An absent/unparsable body is treated as {} so the schema default applies.
  // Fix: a present-but-invalid body previously hit bodySchema.parse, which
  // throws a ZodError and surfaced as an unhandled 500; report 400 instead,
  // matching the validation style of the other handlers.
  const bodyJson = await request.json().catch(() => ({}));
  const bodyParsed = bodySchema.safeParse(bodyJson);
  if (!bodyParsed.success) {
    return Response.json(
      { error: "invalid_body", issues: bodyParsed.error.issues },
      { status: 400 },
    );
  }
  const body = bodyParsed.data;

  const db = getDb();
  const rows = await db<
    {
      id: string;
      type: "upload" | "minio_scan";
      status: string;
      created_at: string;
    }[]
  >`
    insert into imports (type, status)
    values (${body.type}, 'new')
    returning id, type, status, created_at
  `;
  const created = rows[0];
  if (!created) {
    return Response.json({ error: "insert_failed" }, { status: 500 });
  }
  return Response.json(created);
}

View File

@@ -0,0 +1,137 @@
import { z } from "zod";
import { getDb } from "@tline/db";
// Run this route on the Node.js runtime (not Edge).
export const runtime = "nodejs";
// Query-string schema; .strict() rejects unknown keys.
// `includeFailed` arrives as "0"/"1" and is transformed to a boolean.
// `limit` is coerced from its string form and capped at 500 (default 200).
const querySchema = z
.object({
// ISO-8601 datetime bounds; the SQL applies start inclusively and end exclusively.
start: z.string().datetime().optional(),
end: z.string().datetime().optional(),
granularity: z.enum(["year", "month", "day"]).default("day"),
mediaType: z.enum(["image", "video"]).optional(),
includeFailed: z.enum(["0", "1"]).default("0").transform((v) => v === "1"),
limit: z.coerce.number().int().positive().max(500).default(200),
})
.strict();
type Granularity = z.infer<typeof querySchema>["granularity"];

/**
 * Build the SQL expression that buckets `<alias>.capture_ts_utc` at the given
 * granularity. Only ever interpolated via db.unsafe with enum-validated input.
 */
function sqlGroupExpr(granularity: Granularity, alias: string) {
  const column = `${alias}.capture_ts_utc`;
  switch (granularity) {
    case "year":
      return `date_trunc('year', ${column})`;
    case "month":
      return `date_trunc('month', ${column})`;
    default:
      return `date_trunc('day', ${column})`;
  }
}
export async function GET(request: Request): Promise<Response> {
const url = new URL(request.url);
const parsed = querySchema.safeParse({
start: url.searchParams.get("start") ?? undefined,
end: url.searchParams.get("end") ?? undefined,
granularity: url.searchParams.get("granularity") ?? undefined,
mediaType: url.searchParams.get("mediaType") ?? undefined,
includeFailed: url.searchParams.get("includeFailed") ?? undefined,
limit: url.searchParams.get("limit") ?? undefined,
});
if (!parsed.success) {
return Response.json(
{ error: "invalid_query", issues: parsed.error.issues },
{ status: 400 },
);
}
const query = parsed.data;
const start = query.start ? new Date(query.start) : null;
const end = query.end ? new Date(query.end) : null;
const db = getDb();
// Note: capture_ts_utc can be null (unprocessed). Those rows are excluded.
const groupExprFiltered = sqlGroupExpr(query.granularity, "filtered");
const groupExprF = sqlGroupExpr(query.granularity, "f");
const rows = await db<
{
bucket: string;
group_ts: string;
count_total: number;
count_ready: number;
sample_asset_id: string | null;
sample_thumb_small_key: string | null;
sample_thumb_med_key: string | null;
sample_poster_key: string | null;
sample_active_key: string | null;
sample_status: string | null;
sample_media_type: "image" | "video" | null;
}[]
>`
with filtered as (
select
id,
bucket,
media_type,
status,
capture_ts_utc,
active_key,
thumb_small_key,
thumb_med_key,
poster_key
from assets
where capture_ts_utc is not null
and (${start}::timestamptz is null or capture_ts_utc >= ${start}::timestamptz)
and (${end}::timestamptz is null or capture_ts_utc < ${end}::timestamptz)
and (${query.mediaType ?? null}::media_type is null or media_type = ${query.mediaType ?? null}::media_type)
and (
${query.includeFailed}::boolean = true
or status <> 'failed'
)
),
grouped as (
select
bucket,
${db.unsafe(groupExprFiltered)} as group_ts,
count(*)::int as count_total,
count(*) filter (where status = 'ready')::int as count_ready
from filtered
group by bucket, ${db.unsafe(groupExprFiltered)}
order by group_ts desc
limit ${query.limit}
)
select
g.bucket,
to_char(g.group_ts AT TIME ZONE 'UTC', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') as group_ts,
g.count_total,
g.count_ready,
s.id as sample_asset_id,
s.thumb_small_key as sample_thumb_small_key,
s.thumb_med_key as sample_thumb_med_key,
s.poster_key as sample_poster_key,
s.active_key as sample_active_key,
s.status as sample_status,
s.media_type as sample_media_type
from grouped g
left join lateral (
select *
from filtered f
where f.bucket = g.bucket
and ${db.unsafe(groupExprF)} = g.group_ts
and f.status = 'ready'
order by f.capture_ts_utc asc
limit 1
) s on true
order by g.group_ts desc
`;
return Response.json({
granularity: query.granularity,
start: start ? start.toISOString() : null,
end: end ? end.toISOString() : null,
mediaType: query.mediaType ?? null,
includeFailed: query.includeFailed,
nodes: rows,
});
}