Initial commit

This commit is contained in:
OpenCode Test
2025-12-24 10:50:10 -08:00
commit e1a64aa092
70 changed files with 5827 additions and 0 deletions

View File

@@ -0,0 +1,12 @@
{
"name": "@tline/config",
"version": "0.0.0",
"private": true,
"type": "module",
"exports": {
".": {
"types": "./src/index.ts",
"default": "./src/index.ts"
}
}
}

View File

@@ -0,0 +1,25 @@
import { z } from "zod";
// Zod schema for the app-level environment variables this package owns.
// APP_NAME falls back to "porthole"; NEXT_PUBLIC_APP_NAME is the optional
// client-visible override (min(1) means an empty string fails validation).
const envSchema = z.object({
APP_NAME: z.string().min(1).default("porthole"),
NEXT_PUBLIC_APP_NAME: z.string().min(1).optional()
});
// Memoized parse result so process.env is validated at most once per process.
let cachedEnv: z.infer<typeof envSchema> | undefined;
/**
 * Validate process.env against envSchema, memoizing the parsed result so
 * validation runs at most once per process.
 * @throws Error when the environment does not satisfy the schema.
 */
export function getEnv() {
  if (!cachedEnv) {
    const result = envSchema.safeParse(process.env);
    if (!result.success) {
      throw new Error(`Invalid environment variables: ${result.error.message}`);
    }
    cachedEnv = result.data;
  }
  return cachedEnv;
}
/** Resolve the display name, preferring the client-visible override. */
export function getAppName() {
  const { NEXT_PUBLIC_APP_NAME, APP_NAME } = getEnv();
  return NEXT_PUBLIC_APP_NAME ?? APP_NAME;
}

View File

@@ -0,0 +1,7 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"types": ["bun-types"]
},
"include": ["src/**/*.ts"]
}

View File

@@ -0,0 +1,88 @@
-- Task 2 (MVP): assets/imports schema
-- pgcrypto provides gen_random_uuid(), used for the uuid primary key defaults.
CREATE EXTENSION IF NOT EXISTS pgcrypto;
-- CREATE TYPE has no IF NOT EXISTS, so each enum is wrapped in a DO block
-- that swallows duplicate_object to keep reruns of this file idempotent.
DO $$ BEGIN
CREATE TYPE media_type AS ENUM ('image', 'video');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
DO $$ BEGIN
CREATE TYPE asset_status AS ENUM ('new', 'processing', 'ready', 'failed');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- Source a capture timestamp was derived from, most trustworthy first.
DO $$ BEGIN
CREATE TYPE date_confidence AS ENUM ('camera', 'container', 'object_mtime', 'import_time');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
DO $$ BEGIN
CREATE TYPE import_type AS ENUM ('upload', 'minio_scan', 'normalize_copy');
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
-- One row per ingest run, with optional progress counters.
CREATE TABLE IF NOT EXISTS imports (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
type import_type NOT NULL,
status text NOT NULL DEFAULT 'new',
created_at timestamptz NOT NULL DEFAULT now(),
total_count int,
processed_count int,
failed_count int
);
-- One row per discovered media object; most metadata columns are nullable
-- because they are filled in by later processing stages.
CREATE TABLE IF NOT EXISTS assets (
id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
bucket text NOT NULL,
media_type media_type NOT NULL,
mime_type text NOT NULL,
source_key text NOT NULL,
active_key text NOT NULL,
canonical_key text,
capture_ts_utc timestamptz,
capture_offset_minutes int,
date_confidence date_confidence,
width int,
height int,
rotation int,
duration_seconds double precision,
thumb_small_key text,
thumb_med_key text,
poster_key text,
status asset_status NOT NULL DEFAULT 'new',
error_message text,
raw_tags_json jsonb,
created_at timestamptz NOT NULL DEFAULT now(),
updated_at timestamptz NOT NULL DEFAULT now()
);
-- NOTE(review): this makes source_key globally unique across buckets; a
-- later migration relaxes it to per-bucket uniqueness.
CREATE UNIQUE INDEX IF NOT EXISTS assets_source_key_idx ON assets (source_key);
CREATE INDEX IF NOT EXISTS assets_capture_ts_idx ON assets (capture_ts_utc);
CREATE INDEX IF NOT EXISTS assets_status_idx ON assets (status);
CREATE INDEX IF NOT EXISTS assets_media_type_idx ON assets (media_type);
-- Trigger function keeping updated_at current on every UPDATE of assets.
CREATE OR REPLACE FUNCTION set_updated_at() RETURNS trigger AS $$
BEGIN
NEW.updated_at = now();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- CREATE TRIGGER also lacks IF NOT EXISTS; same duplicate_object guard.
DO $$ BEGIN
CREATE TRIGGER assets_set_updated_at
BEFORE UPDATE ON assets
FOR EACH ROW EXECUTE FUNCTION set_updated_at();
EXCEPTION
WHEN duplicate_object THEN null;
END $$;

View File

@@ -0,0 +1,16 @@
-- Task 2 follow-up: align schema with PLAN.md
-- 1) duration_seconds should be int (seconds).
--    round() propagates NULL, so the USING expression needs no CASE guard;
--    the cast runs only on non-null rows and rounds to the nearest second.
ALTER TABLE assets
ALTER COLUMN duration_seconds
TYPE int
USING round(duration_seconds)::int;
-- 2) source_key uniqueness should be per-bucket: the same object key may
--    legitimately exist in more than one bucket.
DROP INDEX IF EXISTS assets_source_key_idx;
CREATE UNIQUE INDEX IF NOT EXISTS assets_bucket_source_key_uidx ON assets (bucket, source_key);

19
packages/db/package.json Normal file
View File

@@ -0,0 +1,19 @@
{
"name": "@tline/db",
"version": "0.0.0",
"private": true,
"type": "module",
"exports": {
".": {
"types": "./src/index.ts",
"default": "./src/index.ts"
}
},
"dependencies": {
"postgres": "^3.4.7",
"zod": "^4.2.1"
},
"scripts": {
"migrate": "bun run src/migrate.ts"
}
}

23
packages/db/src/index.ts Normal file
View File

@@ -0,0 +1,23 @@
import postgres, { type Sql } from "postgres";
import { z } from "zod";
// Connection string is required with no default, so a misconfigured
// process fails fast at first getDb() call.
const envSchema = z.object({
DATABASE_URL: z.string().min(1)
});
// Memoized postgres client; one shared connection pool per process.
let cachedDb: Sql | undefined;
/**
 * Lazily create and memoize the process-wide postgres client.
 * @throws ZodError when DATABASE_URL is missing or empty.
 */
export function getDb() {
  if (!cachedDb) {
    const { DATABASE_URL } = envSchema.parse(process.env);
    cachedDb = postgres(DATABASE_URL);
  }
  return cachedDb;
}
/** Dispose of the cached client, if any; safe to call repeatedly. */
export async function closeDb() {
  const db = cachedDb;
  if (db === undefined) return;
  // Clear the cache before awaiting so a concurrent getDb() builds a fresh
  // client instead of reusing one that is shutting down.
  cachedDb = undefined;
  await db.end({ timeout: 5 });
}

View File

@@ -0,0 +1,43 @@
import { readdir, readFile } from "node:fs/promises";
import path from "node:path";
import postgres from "postgres";
import { z } from "zod";
// The migration runner only needs the database connection string.
const envSchema = z.object({
DATABASE_URL: z.string().min(1)
});
/**
 * Apply pending .sql migrations in lexicographic filename order.
 * Each migration runs in a transaction together with the row recording it in
 * schema_migrations, so a failed file is rolled back as a unit. Already-applied
 * files (tracked by filename) are skipped.
 */
async function main() {
  const env = envSchema.parse(process.env);
  // max: 1 — a single connection keeps migrations strictly sequential.
  const sql = postgres(env.DATABASE_URL, { max: 1 });
  try {
    await sql`CREATE TABLE IF NOT EXISTS schema_migrations (id text primary key, applied_at timestamptz not null default now())`;
    // NOTE(review): import.meta.dir is Bun-specific — confirm this never runs under Node.
    const migrationsDir = path.join(import.meta.dir, "..", "migrations");
    const entries = await readdir(migrationsDir);
    const pending = entries.filter((name) => name.endsWith(".sql")).sort();
    for (const file of pending) {
      const seen = await sql<{ id: string }[]>`SELECT id FROM schema_migrations WHERE id = ${file}`;
      if (seen.length > 0) continue;
      const contents = await readFile(path.join(migrationsDir, file), "utf8");
      await sql.begin(async (tx) => {
        await tx.unsafe(contents);
        await tx`INSERT INTO schema_migrations (id) VALUES (${file})`;
      });
      // eslint-disable-next-line no-console
      console.log(`Applied migration ${file}`);
    }
  } finally {
    await sql.end({ timeout: 5 });
  }
}
// Top-level entry: surface failure through the exit code rather than an
// unhandled rejection; `void` marks the promise as deliberately unawaited.
void main().catch((err: unknown) => {
  // eslint-disable-next-line no-console
  console.error(err);
  process.exitCode = 1;
});

View File

@@ -0,0 +1,7 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"types": ["bun-types"]
},
"include": ["src/**/*.ts", "migrations/**/*.sql"]
}

View File

@@ -0,0 +1,17 @@
{
"name": "@tline/minio",
"version": "0.0.0",
"private": true,
"type": "module",
"exports": {
".": {
"types": "./src/index.ts",
"default": "./src/index.ts"
}
},
"dependencies": {
"@aws-sdk/client-s3": "^3.899.0",
"@aws-sdk/s3-request-presigner": "^3.899.0",
"zod": "^4.2.1"
}
}

View File

@@ -0,0 +1,97 @@
import "server-only";
import { GetObjectCommand, S3Client } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { z } from "zod";
// MinIO/S3 configuration. The two endpoints are both optional here and
// validated at client-construction time instead, so a process that only
// needs one of them can still start.
// NOTE(review): z.string().url() is deprecated in Zod 4 in favor of z.url() —
// consider migrating while on zod ^4.
const envSchema = z.object({
MINIO_INTERNAL_ENDPOINT: z.string().url().optional(),
MINIO_PUBLIC_ENDPOINT_TS: z.string().url().optional(),
MINIO_ACCESS_KEY_ID: z.string().min(1),
MINIO_SECRET_ACCESS_KEY: z.string().min(1),
MINIO_REGION: z.string().min(1).default("us-east-1"),
MINIO_BUCKET: z.string().min(1).default("media"),
MINIO_PRESIGN_EXPIRES_SECONDS: z.coerce.number().int().positive().default(900)
});
type MinioEnv = z.infer<typeof envSchema>;
// Memoized env and S3 clients — one internal, one for public URL signing.
let cachedEnv: MinioEnv | undefined;
let cachedInternal: S3Client | undefined;
let cachedPublic: S3Client | undefined;
/**
 * Parse and memoize MinIO configuration from process.env.
 * @throws Error when credentials are missing or values are invalid.
 */
export function getMinioEnv(): MinioEnv {
  if (cachedEnv === undefined) {
    const result = envSchema.safeParse(process.env);
    if (!result.success) {
      throw new Error(`Invalid MinIO env: ${result.error.message}`);
    }
    cachedEnv = result.data;
  }
  return cachedEnv;
}
/** Name of the bucket media objects live in (defaults to "media"). */
export function getMinioBucket() {
  const { MINIO_BUCKET } = getMinioEnv();
  return MINIO_BUCKET;
}
/**
 * S3 client for server-to-MinIO traffic over the internal endpoint.
 * @throws Error when MINIO_INTERNAL_ENDPOINT is not configured.
 */
export function getMinioInternalClient(): S3Client {
  if (cachedInternal) return cachedInternal;
  const env = getMinioEnv();
  const endpoint = env.MINIO_INTERNAL_ENDPOINT;
  if (!endpoint) {
    throw new Error("MINIO_INTERNAL_ENDPOINT is required for internal MinIO client");
  }
  // forcePathStyle: MinIO addresses buckets as path segments, not subdomains.
  cachedInternal = new S3Client({
    region: env.MINIO_REGION,
    endpoint,
    forcePathStyle: true,
    credentials: {
      accessKeyId: env.MINIO_ACCESS_KEY_ID,
      secretAccessKey: env.MINIO_SECRET_ACCESS_KEY
    }
  });
  return cachedInternal;
}
/**
 * S3 client bound to the public endpoint, used only to sign URLs that a
 * browser outside the cluster must be able to resolve.
 * @throws Error when MINIO_PUBLIC_ENDPOINT_TS is not configured.
 */
export function getMinioPublicSigningClient(): S3Client {
  if (cachedPublic) return cachedPublic;
  const env = getMinioEnv();
  const endpoint = env.MINIO_PUBLIC_ENDPOINT_TS;
  if (!endpoint) {
    throw new Error("MINIO_PUBLIC_ENDPOINT_TS is required for presigned URL generation");
  }
  // forcePathStyle: MinIO addresses buckets as path segments, not subdomains.
  cachedPublic = new S3Client({
    region: env.MINIO_REGION,
    endpoint,
    forcePathStyle: true,
    credentials: {
      accessKeyId: env.MINIO_ACCESS_KEY_ID,
      secretAccessKey: env.MINIO_SECRET_ACCESS_KEY
    }
  });
  return cachedPublic;
}
/**
 * Build a time-limited GET URL for an object, signed against the public
 * endpoint so the link works from outside the cluster. Expiry defaults to
 * MINIO_PRESIGN_EXPIRES_SECONDS (900s) when not supplied by the caller.
 */
export async function presignGetObjectUrl(input: {
  bucket: string;
  key: string;
  expiresSeconds?: number;
  responseContentType?: string;
  responseContentDisposition?: string;
}) {
  const env = getMinioEnv();
  const client = getMinioPublicSigningClient();
  const command = new GetObjectCommand({
    Bucket: input.bucket,
    Key: input.key,
    ResponseContentType: input.responseContentType,
    ResponseContentDisposition: input.responseContentDisposition,
  });
  const expiresIn = input.expiresSeconds ?? env.MINIO_PRESIGN_EXPIRES_SECONDS;
  return { url: await getSignedUrl(client, command, { expiresIn }), expiresSeconds: expiresIn };
}

View File

@@ -0,0 +1,7 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"types": ["bun-types"]
},
"include": ["src/**/*.ts"]
}

View File

@@ -0,0 +1,17 @@
{
"name": "@tline/queue",
"version": "0.0.0",
"private": true,
"type": "module",
"exports": {
".": {
"types": "./src/index.ts",
"default": "./src/index.ts"
}
},
"dependencies": {
"bullmq": "^5.61.0",
"ioredis": "^5.8.0",
"zod": "^4.2.1"
}
}

128
packages/queue/src/index.ts Normal file
View File

@@ -0,0 +1,128 @@
import { z } from "zod";
import { Queue } from "bullmq";
import IORedis from "ioredis";
// Queue configuration; both values have sensible local-dev defaults.
const envSchema = z.object({
REDIS_URL: z.string().min(1).default("redis://localhost:6379"),
QUEUE_NAME: z.string().min(1).default("tline")
});
// Closed set of job names this queue accepts.
export const jobNameSchema = z.enum([
"scan_minio_prefix",
"process_asset",
"copy_to_canonical"
]);
export type QueueJobName = z.infer<typeof jobNameSchema>;
// .strict() rejects unknown keys so producers cannot smuggle extra fields
// into job payloads.
export const scanMinioPrefixPayloadSchema = z
.object({
importId: z.string().uuid(),
bucket: z.string().min(1),
prefix: z.string().min(1)
})
.strict();
export const processAssetPayloadSchema = z
.object({
assetId: z.string().uuid()
})
.strict();
export const copyToCanonicalPayloadSchema = z
.object({
assetId: z.string().uuid()
})
.strict();
// Tagged union tying each job name to its payload shape, for consumers
// that need to validate (name, payload) pairs together.
export const payloadByJobNameSchema = z.discriminatedUnion("name", [
z.object({ name: z.literal("scan_minio_prefix"), payload: scanMinioPrefixPayloadSchema }),
z.object({ name: z.literal("process_asset"), payload: processAssetPayloadSchema }),
z.object({ name: z.literal("copy_to_canonical"), payload: copyToCanonicalPayloadSchema })
]);
export type ScanMinioPrefixPayload = z.infer<typeof scanMinioPrefixPayloadSchema>;
export type ProcessAssetPayload = z.infer<typeof processAssetPayloadSchema>;
export type CopyToCanonicalPayload = z.infer<typeof copyToCanonicalPayloadSchema>;
type QueueEnv = z.infer<typeof envSchema>;
// Module-level singletons: parsed env, Redis connection, and BullMQ queue.
let cachedEnv: QueueEnv | undefined;
let cachedRedis: IORedis | undefined;
let cachedQueue: Queue | undefined;
/**
 * Parse and memoize queue configuration from process.env.
 * @throws Error when the environment fails schema validation.
 */
export function getQueueEnv(): QueueEnv {
  if (cachedEnv === undefined) {
    const result = envSchema.safeParse(process.env);
    if (!result.success) {
      throw new Error(`Invalid queue env: ${result.error.message}`);
    }
    cachedEnv = result.data;
  }
  return cachedEnv;
}
/** BullMQ queue name (defaults to "tline"). */
export function getQueueName() {
  const { QUEUE_NAME } = getQueueEnv();
  return QUEUE_NAME;
}
export function getRedis() {
if (cachedRedis) return cachedRedis;
const env = getQueueEnv();
cachedRedis = new IORedis(env.REDIS_URL, {
lazyConnect: true,
maxRetriesPerRequest: null
});
cachedRedis.on("error", () => {});
return cachedRedis;
}
/**
 * Lazily create the singleton BullMQ queue bound to the shared Redis
 * connection. The original called getQueueEnv() separately, but that is
 * redundant: getQueueName() (and getRedis()) already validate the env.
 */
export function getQueue() {
  if (cachedQueue) return cachedQueue;
  cachedQueue = new Queue(getQueueName(), {
    connection: getRedis()
  });
  return cachedQueue;
}
/**
 * Close the queue and Redis connection and reset the module caches.
 * Fixes two issues in the original: (1) the caches were cleared only after
 * the awaited close, so a concurrent getQueue()/getRedis() during shutdown
 * could receive a handle that was mid-teardown — now they are captured and
 * cleared up front, matching closeDb() in @tline/db; (2) the quit() .catch
 * fallback read the mutable module variable (already undefined by then in
 * some interleavings) instead of the captured client.
 */
export async function closeQueue() {
  const queue = cachedQueue;
  const redis = cachedRedis;
  cachedQueue = undefined;
  cachedRedis = undefined;
  await Promise.all([
    queue?.close(),
    // quit() asks Redis to close gracefully; fall back to a hard disconnect.
    redis?.quit().catch(() => redis.disconnect())
  ]);
}
/** Validate and enqueue a scan_minio_prefix job (3 attempts, exponential backoff). */
export async function enqueueScanMinioPrefix(input: ScanMinioPrefixPayload) {
  // Parse before touching the queue so an invalid payload never creates one.
  const payload = scanMinioPrefixPayloadSchema.parse(input);
  return getQueue().add("scan_minio_prefix", payload, {
    attempts: 3,
    backoff: { type: "exponential", delay: 1000 }
  });
}
/** Validate and enqueue a process_asset job (3 attempts, exponential backoff). */
export async function enqueueProcessAsset(input: ProcessAssetPayload) {
  // Parse before touching the queue so an invalid payload never creates one.
  const payload = processAssetPayloadSchema.parse(input);
  return getQueue().add("process_asset", payload, {
    attempts: 3,
    backoff: { type: "exponential", delay: 1000 }
  });
}
/** Validate and enqueue a copy_to_canonical job (3 attempts, exponential backoff). */
export async function enqueueCopyToCanonical(input: CopyToCanonicalPayload) {
  // Parse before touching the queue so an invalid payload never creates one.
  const payload = copyToCanonicalPayloadSchema.parse(input);
  return getQueue().add("copy_to_canonical", payload, {
    attempts: 3,
    backoff: { type: "exponential", delay: 1000 }
  });
}

View File

@@ -0,0 +1,7 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"types": ["bun-types"]
},
"include": ["src/**/*.ts"]
}