This commit is contained in:
2026-03-29 06:57:34 -04:00
commit 37503231b3
31 changed files with 3444 additions and 0 deletions

1
alembic/README Normal file
View File

@@ -0,0 +1 @@
Generic single-database configuration.

78
alembic/env.py Normal file
View File

@@ -0,0 +1,78 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# This is the Alembic Config object, which provides
# access to the values within the .ini file in use (alembic.ini).
config = context.config

# Interpret the config file for Python logging.
# This sets up loggers/handlers from the [loggers]/[handlers] ini sections;
# skipped when Alembic was configured programmatically without an ini file.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# NOTE: left as None — this project writes migrations by hand (raw SQL in
# the version files), so autogenerate diffing is not wired up.
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Emit migration SQL in 'offline' (--sql) mode.

    Configures the Alembic context against the bare database URL from the
    ini file rather than a live Engine, so no DBAPI driver needs to be
    installed. Each call to context.execute() inside the migration scripts
    is rendered as literal SQL on the script output instead of being sent
    to a server.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode against a live database.

    Builds an Engine from the [sqlalchemy.*] keys of the ini section and
    associates one of its connections with the Alembic context; migrations
    then execute directly on that connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot use: no need to pool connections
    )

    with engine.connect() as conn:
        context.configure(connection=conn, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic imports this module and the dispatch below runs at
# import time, choosing the mode from how the CLI was invoked
# (`alembic upgrade --sql` => offline; otherwise online).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

28
alembic/script.py.mako Normal file
View File

@@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,116 @@
"""Initial schema
Revision ID: 001
Revises:
Create Date: 2026-03-28
"""
from alembic import op
revision = "001"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the core application schema.

    Tables: users, tasks, steps, sessions, distractions,
    distraction_patterns — plus their supporting indexes. All DDL is
    idempotent (IF NOT EXISTS) and executed as a single raw-SQL batch.
    """
    # NOTE(review): gen_random_uuid() is built in on PostgreSQL 13+; on older
    # servers it requires the pgcrypto extension — confirm the target version.
    # NOTE(review): the CREATE INDEX statements use unqualified table names
    # while the tables are schema-qualified as public.* — this relies on
    # "public" being on the search_path; verify for this deployment.
    op.execute("""
    CREATE TABLE IF NOT EXISTS public.users (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        email TEXT UNIQUE,
        password_hash TEXT,
        apple_user_id TEXT UNIQUE,
        display_name TEXT,
        timezone TEXT DEFAULT 'America/Chicago',
        distraction_apps TEXT[] DEFAULT '{}',
        preferences JSONB DEFAULT '{}',
        created_at TIMESTAMPTZ DEFAULT now(),
        updated_at TIMESTAMPTZ DEFAULT now(),
        CONSTRAINT auth_method CHECK (email IS NOT NULL OR apple_user_id IS NOT NULL)
    );
    CREATE TABLE IF NOT EXISTS public.tasks (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        user_id UUID NOT NULL REFERENCES public.users(id) ON DELETE CASCADE,
        title TEXT NOT NULL,
        description TEXT,
        priority INT DEFAULT 0,
        status TEXT DEFAULT 'pending',
        deadline TIMESTAMPTZ,
        estimated_minutes INT,
        source TEXT DEFAULT 'manual',
        tags TEXT[] DEFAULT '{}',
        plan_type TEXT,
        brain_dump_raw TEXT,
        created_at TIMESTAMPTZ DEFAULT now(),
        updated_at TIMESTAMPTZ DEFAULT now()
    );
    CREATE TABLE IF NOT EXISTS public.steps (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        task_id UUID NOT NULL REFERENCES public.tasks(id) ON DELETE CASCADE,
        sort_order INT NOT NULL,
        title TEXT NOT NULL,
        description TEXT,
        estimated_minutes INT,
        status TEXT DEFAULT 'pending',
        checkpoint_note TEXT,
        last_checked_at TIMESTAMPTZ,
        completed_at TIMESTAMPTZ,
        created_at TIMESTAMPTZ DEFAULT now()
    );
    CREATE TABLE IF NOT EXISTS public.sessions (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        user_id UUID NOT NULL REFERENCES public.users(id) ON DELETE CASCADE,
        task_id UUID REFERENCES public.tasks(id) ON DELETE SET NULL,
        started_at TIMESTAMPTZ DEFAULT now(),
        ended_at TIMESTAMPTZ,
        status TEXT DEFAULT 'active',
        checkpoint JSONB DEFAULT '{}',
        created_at TIMESTAMPTZ DEFAULT now()
    );
    CREATE TABLE IF NOT EXISTS public.distractions (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        user_id UUID NOT NULL REFERENCES public.users(id) ON DELETE CASCADE,
        session_id UUID REFERENCES public.sessions(id) ON DELETE SET NULL,
        detected_at TIMESTAMPTZ DEFAULT now(),
        distraction_type TEXT,
        app_name TEXT,
        duration_seconds INT,
        confidence FLOAT,
        vlm_summary TEXT,
        nudge_shown BOOLEAN DEFAULT false
    );
    CREATE TABLE IF NOT EXISTS public.distraction_patterns (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        user_id UUID NOT NULL REFERENCES public.users(id) ON DELETE CASCADE,
        pattern_type TEXT,
        description TEXT,
        frequency INT DEFAULT 1,
        last_seen TIMESTAMPTZ DEFAULT now(),
        metadata JSONB DEFAULT '{}'
    );
    CREATE INDEX IF NOT EXISTS idx_tasks_user ON tasks(user_id, status);
    CREATE INDEX IF NOT EXISTS idx_steps_task ON steps(task_id, sort_order);
    CREATE INDEX IF NOT EXISTS idx_steps_status ON steps(task_id, status);
    CREATE INDEX IF NOT EXISTS idx_sessions_user ON sessions(user_id, started_at DESC);
    CREATE INDEX IF NOT EXISTS idx_sessions_active ON sessions(user_id, status) WHERE status = 'active';
    CREATE INDEX IF NOT EXISTS idx_distractions_user ON distractions(user_id, detected_at DESC);
    CREATE INDEX IF NOT EXISTS idx_distractions_app ON distractions(user_id, app_name, detected_at DESC);
    CREATE INDEX IF NOT EXISTS idx_distractions_hourly ON distractions(user_id, EXTRACT(HOUR FROM detected_at AT TIME ZONE 'UTC'));
    """)
def downgrade():
    """Drop every table created by this revision.

    CASCADE removes dependent objects (foreign keys, indexes); drop order
    is children first so each statement also succeeds without CASCADE.
    """
    op.execute("""
    DROP TABLE IF EXISTS public.distraction_patterns CASCADE;
    DROP TABLE IF EXISTS public.distractions CASCADE;
    DROP TABLE IF EXISTS public.sessions CASCADE;
    DROP TABLE IF EXISTS public.steps CASCADE;
    DROP TABLE IF EXISTS public.tasks CASCADE;
    DROP TABLE IF EXISTS public.users CASCADE;
    """)

View File

@@ -0,0 +1,29 @@
"""Cross-device handoff: device_tokens + session platform
Revision ID: 002
Revises: 001
Create Date: 2026-03-28
"""
from alembic import op
revision = "002"
down_revision = "001"
branch_labels = None
depends_on = None
def upgrade():
    """Add users.device_tokens and a NOT NULL sessions.platform column.

    platform defaults to 'mac' so existing session rows are backfilled
    by the ADD COLUMN before the NOT NULL constraint is applied.
    """
    # NOTE(review): if a "platform" column already exists (ADD COLUMN is
    # skipped by IF NOT EXISTS) and contains NULLs, SET NOT NULL will fail —
    # confirm no environment predates this migration with a nullable column.
    op.execute("""
    ALTER TABLE users ADD COLUMN IF NOT EXISTS device_tokens JSONB DEFAULT '[]';
    ALTER TABLE sessions ADD COLUMN IF NOT EXISTS platform TEXT DEFAULT 'mac';
    ALTER TABLE sessions ALTER COLUMN platform SET NOT NULL;
    """)
def downgrade():
    """Remove the columns added by this revision (reverse order of upgrade)."""
    op.execute("""
    ALTER TABLE sessions DROP COLUMN IF EXISTS platform;
    ALTER TABLE users DROP COLUMN IF EXISTS device_tokens;
    """)

View File

@@ -0,0 +1,36 @@
"""Proactive actions table for Argus layer
Revision ID: 003
Revises: 002
Create Date: 2026-03-28
"""
from alembic import op
revision = "003"
down_revision = "002"
branch_labels = None
depends_on = None
def upgrade():
    """Create public.proactive_actions plus its (user_id, friction_type) index.

    Rows reference users (CASCADE on delete) and optionally sessions
    (SET NULL on delete), mirroring the FK style of the 001 schema.
    """
    op.execute("""
    CREATE TABLE IF NOT EXISTS public.proactive_actions (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        user_id UUID NOT NULL REFERENCES public.users(id) ON DELETE CASCADE,
        session_id UUID REFERENCES public.sessions(id) ON DELETE SET NULL,
        friction_type TEXT NOT NULL,
        proposed_action TEXT NOT NULL,
        user_choice TEXT,
        chosen_action TEXT,
        executed BOOLEAN DEFAULT false,
        detected_at TIMESTAMPTZ DEFAULT now(),
        responded_at TIMESTAMPTZ
    );
    CREATE INDEX IF NOT EXISTS idx_proactive_user ON proactive_actions(user_id, friction_type);
    """)
def downgrade():
    """Drop the proactive_actions table (CASCADE removes its index and FKs)."""
    op.execute("DROP TABLE IF EXISTS public.proactive_actions CASCADE;")