Initial commit

Commit e1a64aa092 by OpenCode Test, 2025-12-24 10:50:10 -08:00
70 changed files with 5827 additions and 0 deletions


@@ -0,0 +1,88 @@
-- Task 2 (MVP): assets/imports schema
CREATE EXTENSION IF NOT EXISTS pgcrypto;
DO $$ BEGIN
  CREATE TYPE media_type AS ENUM ('image', 'video');
EXCEPTION
  WHEN duplicate_object THEN null;
END $$;

DO $$ BEGIN
  CREATE TYPE asset_status AS ENUM ('new', 'processing', 'ready', 'failed');
EXCEPTION
  WHEN duplicate_object THEN null;
END $$;

DO $$ BEGIN
  CREATE TYPE date_confidence AS ENUM ('camera', 'container', 'object_mtime', 'import_time');
EXCEPTION
  WHEN duplicate_object THEN null;
END $$;

DO $$ BEGIN
  CREATE TYPE import_type AS ENUM ('upload', 'minio_scan', 'normalize_copy');
EXCEPTION
  WHEN duplicate_object THEN null;
END $$;
CREATE TABLE IF NOT EXISTS imports (
  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  type import_type NOT NULL,
  status text NOT NULL DEFAULT 'new',
  created_at timestamptz NOT NULL DEFAULT now(),
  total_count int,
  processed_count int,
  failed_count int
);
CREATE TABLE IF NOT EXISTS assets (
  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  bucket text NOT NULL,
  media_type media_type NOT NULL,
  mime_type text NOT NULL,
  source_key text NOT NULL,
  active_key text NOT NULL,
  canonical_key text,
  capture_ts_utc timestamptz,
  capture_offset_minutes int,
  date_confidence date_confidence,
  width int,
  height int,
  rotation int,
  duration_seconds double precision,
  thumb_small_key text,
  thumb_med_key text,
  poster_key text,
  status asset_status NOT NULL DEFAULT 'new',
  error_message text,
  raw_tags_json jsonb,
  created_at timestamptz NOT NULL DEFAULT now(),
  updated_at timestamptz NOT NULL DEFAULT now()
);
CREATE UNIQUE INDEX IF NOT EXISTS assets_source_key_idx ON assets (source_key);
CREATE INDEX IF NOT EXISTS assets_capture_ts_idx ON assets (capture_ts_utc);
CREATE INDEX IF NOT EXISTS assets_status_idx ON assets (status);
CREATE INDEX IF NOT EXISTS assets_media_type_idx ON assets (media_type);
CREATE OR REPLACE FUNCTION set_updated_at() RETURNS trigger AS $$
BEGIN
  NEW.updated_at = now();
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DO $$ BEGIN
  CREATE TRIGGER assets_set_updated_at
    BEFORE UPDATE ON assets
    FOR EACH ROW EXECUTE FUNCTION set_updated_at();
EXCEPTION
  WHEN duplicate_object THEN null;
END $$;
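For orientation, a minimal usage sketch against this schema (illustrative, not part of the commit), assuming the migration above has been applied and DATABASE_URL points at that database; the bucket and object keys are placeholders.

import postgres from "postgres";

// Hypothetical example: insert one image asset and read back its defaults.
const sql = postgres(process.env.DATABASE_URL!);

const [asset] = await sql`
  INSERT INTO assets (bucket, media_type, mime_type, source_key, active_key)
  VALUES ('photos', 'image', 'image/jpeg', 'incoming/IMG_0001.jpg', 'incoming/IMG_0001.jpg')
  RETURNING id, status, created_at
`;
console.log(asset); // status defaults to 'new'; updated_at is bumped by the trigger on later UPDATEs

await sql.end({ timeout: 5 });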


@@ -0,0 +1,16 @@
-- Task 2 follow-up: align schema with PLAN.md
-- 1) duration_seconds should be int (seconds)
ALTER TABLE assets
  ALTER COLUMN duration_seconds
  TYPE int
  USING (
    CASE
      WHEN duration_seconds IS NULL THEN NULL
      ELSE round(duration_seconds)::int
    END
  );
-- 2) source_key uniqueness should be per-bucket
DROP INDEX IF EXISTS assets_source_key_idx;
CREATE UNIQUE INDEX IF NOT EXISTS assets_bucket_source_key_uidx ON assets (bucket, source_key);
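To illustrate the per-bucket uniqueness introduced here (a sketch, not part of the commit; bucket names are hypothetical): the same source_key may exist in two different buckets, while a repeat within one bucket hits the (bucket, source_key) unique index.

import postgres from "postgres";

const sql = postgres(process.env.DATABASE_URL!);

// Same source_key in two different buckets: both inserts succeed.
await sql`INSERT INTO assets (bucket, media_type, mime_type, source_key, active_key)
  VALUES ('photos-a', 'image', 'image/jpeg', 'dup.jpg', 'dup.jpg')`;
await sql`INSERT INTO assets (bucket, media_type, mime_type, source_key, active_key)
  VALUES ('photos-b', 'image', 'image/jpeg', 'dup.jpg', 'dup.jpg')`;

// Same bucket and source_key: the unique index rejects the duplicate; DO NOTHING skips it.
await sql`INSERT INTO assets (bucket, media_type, mime_type, source_key, active_key)
  VALUES ('photos-a', 'image', 'image/jpeg', 'dup.jpg', 'dup.jpg')
  ON CONFLICT (bucket, source_key) DO NOTHING`;

await sql.end({ timeout: 5 });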

packages/db/package.json

@@ -0,0 +1,19 @@
{
  "name": "@tline/db",
  "version": "0.0.0",
  "private": true,
  "type": "module",
  "exports": {
    ".": {
      "types": "./src/index.ts",
      "default": "./src/index.ts"
    }
  },
  "dependencies": {
    "postgres": "^3.4.7",
    "zod": "^4.2.1"
  },
  "scripts": {
    "migrate": "bun run src/migrate.ts"
  }
}

packages/db/src/index.ts

@@ -0,0 +1,23 @@
import postgres, { type Sql } from "postgres";
import { z } from "zod";

const envSchema = z.object({
  DATABASE_URL: z.string().min(1)
});

let cachedDb: Sql | undefined;

export function getDb() {
  if (cachedDb) return cachedDb;
  const env = envSchema.parse(process.env);
  cachedDb = postgres(env.DATABASE_URL);
  return cachedDb;
}

export async function closeDb() {
  if (!cachedDb) return;
  const db = cachedDb;
  cachedDb = undefined;
  await db.end({ timeout: 5 });
}
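A hypothetical consumer of this module from another workspace package (not part of this commit), assuming it depends on @tline/db and DATABASE_URL is set:

import { getDb, closeDb } from "@tline/db";

// getDb() lazily creates and caches one postgres.js client from DATABASE_URL.
const sql = getDb();
const [row] = await sql`SELECT count(*)::int AS count FROM assets`;
console.log(`assets: ${row.count}`);

// closeDb() drains the cached client so short-lived scripts can exit cleanly.
await closeDb();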


@@ -0,0 +1,43 @@
import { readdir } from "node:fs/promises";
import { readFile } from "node:fs/promises";
import path from "node:path";
import postgres from "postgres";
import { z } from "zod";

const envSchema = z.object({
  DATABASE_URL: z.string().min(1)
});

async function main() {
  const env = envSchema.parse(process.env);
  const sql = postgres(env.DATABASE_URL, { max: 1 });
  try {
    await sql`CREATE TABLE IF NOT EXISTS schema_migrations (id text primary key, applied_at timestamptz not null default now())`;
    const migrationsDir = path.join(import.meta.dir, "..", "migrations");
    const files = (await readdir(migrationsDir)).filter((f) => f.endsWith(".sql")).sort();
    for (const file of files) {
      const already = await sql<{ id: string }[]>`SELECT id FROM schema_migrations WHERE id = ${file}`;
      if (already.length > 0) continue;
      const contents = await readFile(path.join(migrationsDir, file), "utf8");
      await sql.begin(async (tx) => {
        await tx.unsafe(contents);
        await tx`INSERT INTO schema_migrations (id) VALUES (${file})`;
      });
      // eslint-disable-next-line no-console
      console.log(`Applied migration ${file}`);
    }
  } finally {
    await sql.end({ timeout: 5 });
  }
}

main().catch((err) => {
  // eslint-disable-next-line no-console
  console.error(err);
  process.exitCode = 1;
});
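The runner records each applied file name in schema_migrations, so re-running bun run migrate skips files already listed there. A small sketch (illustrative, not part of the commit) for inspecting that bookkeeping table:

import postgres from "postgres";

const sql = postgres(process.env.DATABASE_URL!);
// One row per applied migration file, in the same lexicographic order the runner uses.
const applied = await sql<{ id: string; applied_at: Date }[]>`
  SELECT id, applied_at FROM schema_migrations ORDER BY id
`;
for (const row of applied) console.log(row.id, row.applied_at.toISOString());
await sql.end({ timeout: 5 });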


@@ -0,0 +1,7 @@
{
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "types": ["bun-types"]
  },
  "include": ["src/**/*.ts", "migrations/**/*.sql"]
}