API
This commit is contained in:
298
app/routers/tasks.py
Normal file
298
app/routers/tasks.py
Normal file
@@ -0,0 +1,298 @@
|
||||
from datetime import datetime as dt
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
|
||||
from app.middleware.auth import get_current_user_id
|
||||
from app.models import (
|
||||
BrainDumpRequest,
|
||||
BrainDumpResponse,
|
||||
PlanRequest,
|
||||
PlanResponse,
|
||||
StepOut,
|
||||
TaskCreate,
|
||||
TaskOut,
|
||||
TaskUpdate,
|
||||
)
|
||||
from app.services import llm, push
|
||||
from app.services.db import get_pool
|
||||
|
||||
# Router for all task CRUD / brain-dump / planning endpoints, mounted at /tasks.
router = APIRouter(prefix="/tasks", tags=["tasks"])
|
||||
|
||||
|
||||
def _row_to_task(row) -> TaskOut:
    """Build a TaskOut response model from a task DB row.

    All columns map straight through except ``tags``, which is
    normalized to an empty list when the column is NULL.
    """
    passthrough_columns = (
        "id",
        "user_id",
        "title",
        "description",
        "priority",
        "status",
        "deadline",
        "estimated_minutes",
        "source",
        "plan_type",
        "brain_dump_raw",
        "created_at",
        "updated_at",
    )
    fields = {name: row[name] for name in passthrough_columns}
    fields["tags"] = row["tags"] or []
    return TaskOut(**fields)
|
||||
|
||||
|
||||
# Column list for every SELECT/RETURNING that feeds _row_to_task;
# must stay in sync with the fields that function reads.
TASK_COLUMNS = "id, user_id, title, description, priority, status, deadline, estimated_minutes, source, tags, plan_type, brain_dump_raw, created_at, updated_at"
|
||||
|
||||
|
||||
@router.get("", response_model=list[TaskOut])
async def list_tasks(
    status: str | None = None,
    priority: int | None = None,
    sort_by: str = Query("priority", pattern="^(priority|deadline|created_at)$"),
    user_id: str = Depends(get_current_user_id),
):
    """List the caller's tasks, always hiding soft-deleted ('deferred') ones.

    Supports optional exact-match filters on status and priority. The sort
    column is restricted by the Query pattern, so interpolating it into the
    SQL is safe; priority sorts high-to-low, the others ascending.
    """
    pool = await get_pool()

    sql = f"SELECT {TASK_COLUMNS} FROM tasks WHERE user_id = $1::uuid AND status != 'deferred'"
    args: list = [user_id]

    def _add_filter(column: str, value) -> None:
        # Placeholder number tracks the argument list: $2, $3, ...
        nonlocal sql
        args.append(value)
        sql += f" AND {column} = ${len(args)}"

    if status:
        _add_filter("status", status)
    if priority is not None:
        _add_filter("priority", priority)

    direction = "DESC" if sort_by == "priority" else "ASC"
    sql += f" ORDER BY {sort_by} {direction}"

    rows = await pool.fetch(sql, *args)
    return [_row_to_task(record) for record in rows]
|
||||
|
||||
|
||||
@router.get("/upcoming", response_model=list[TaskOut])
async def upcoming_tasks(user_id: str = Depends(get_current_user_id)):
    """Tasks with a deadline within the next 48 hours, soonest first.

    Tasks without a deadline, completed tasks, and soft-deleted
    ('deferred') tasks are excluded.
    """
    sql = f"""SELECT {TASK_COLUMNS} FROM tasks
        WHERE user_id = $1::uuid AND deadline IS NOT NULL
        AND deadline <= now() + interval '48 hours'
        AND status NOT IN ('done', 'deferred')
        ORDER BY deadline ASC"""

    pool = await get_pool()
    rows = await pool.fetch(sql, user_id)
    return [_row_to_task(record) for record in rows]
|
||||
|
||||
|
||||
@router.post("", response_model=TaskOut, status_code=201)
async def create_task(req: TaskCreate, user_id: str = Depends(get_current_user_id)):
    """Insert a task owned by the caller and send a push notification."""
    insert_sql = f"""INSERT INTO tasks (user_id, title, description, priority, deadline, estimated_minutes, tags)
        VALUES ($1::uuid, $2, $3, $4, $5, $6, $7)
        RETURNING {TASK_COLUMNS}"""
    insert_args = (
        user_id,
        req.title,
        req.description,
        req.priority,
        req.deadline,
        req.estimated_minutes,
        req.tags,
    )

    pool = await get_pool()
    created = await pool.fetchrow(insert_sql, *insert_args)

    # A freshly created manual task has no steps yet.
    await push.send_task_added(user_id, created["title"], step_count=0)
    return _row_to_task(created)
|
||||
|
||||
|
||||
def _coerce_minutes(value):
    """Best-effort coercion of an LLM-provided minutes value to int.

    Strings that fail to parse become None; non-string values
    (already-int or None) pass through unchanged.
    """
    if isinstance(value, str):
        try:
            return int(value)
        except ValueError:
            return None
    return value


def _coerce_deadline(value):
    """Best-effort parse of an LLM-provided ISO-8601 deadline string.

    Returns a datetime (asyncpg needs datetime, not str) or None when the
    value is missing, the literal string "null", or unparseable.
    """
    if not (isinstance(value, str) and value and value != "null"):
        return None
    try:
        # LLMs frequently emit a trailing 'Z' for UTC, which
        # datetime.fromisoformat rejects before Python 3.11; normalize it
        # so such deadlines are kept instead of silently dropped.
        return dt.fromisoformat(value.replace("Z", "+00:00"))
    except ValueError:
        return None


@router.post("/brain-dump", response_model=BrainDumpResponse)
async def brain_dump(req: BrainDumpRequest, user_id: str = Depends(get_current_user_id)):
    """Parse free-form text into tasks via the LLM and persist them.

    Each parsed task is inserted with its original raw text attached. The
    LLM's subtasks are saved as steps immediately, except those flagged
    "suggested", which are opt-in from the client and only echoed back in
    the response. A push notification is sent per created task.
    """
    result = await llm.parse_brain_dump(req.raw_text, req.timezone)

    pool = await get_pool()
    parsed_tasks = []
    for t in result.get("parsed_tasks", []):
        deadline = _coerce_deadline(t.get("deadline"))
        est_minutes = _coerce_minutes(t.get("estimated_minutes"))

        subtasks_raw = t.get("subtasks") or []
        has_subtasks = len(subtasks_raw) > 0

        row = await pool.fetchrow(
            f"""INSERT INTO tasks (user_id, title, description, priority, deadline,
                estimated_minutes, source, tags, brain_dump_raw, plan_type)
                VALUES ($1::uuid, $2, $3, $4, $5::timestamptz, $6, $7, $8, $9, $10)
                RETURNING {TASK_COLUMNS}""",
            user_id,
            t["title"],
            t.get("description"),
            int(t.get("priority", 0)),
            deadline,
            est_minutes,
            req.source,
            t.get("tags", []),
            req.raw_text,
            "brain_dump" if has_subtasks else None,
        )
        task_id = row["id"]

        all_subtasks = []
        sort_order = 1
        for sub in subtasks_raw:
            sub_est = _coerce_minutes(sub.get("estimated_minutes"))
            sub_deadline = _coerce_deadline(sub.get("deadline"))
            is_suggested = bool(sub.get("suggested", False))

            # Only save non-suggested steps now; suggested ones are opt-in
            # from the client. sort_order advances only for saved steps so
            # step numbering stays dense.
            if not is_suggested:
                await pool.fetchrow(
                    """INSERT INTO steps (task_id, sort_order, title, description, estimated_minutes)
                       VALUES ($1, $2, $3, $4, $5)
                       RETURNING id""",
                    task_id,
                    sort_order,
                    sub["title"],
                    sub.get("description"),
                    sub_est,
                )
                sort_order += 1

            all_subtasks.append({
                "title": sub["title"],
                "description": sub.get("description"),
                "deadline": sub_deadline.isoformat() if sub_deadline else None,
                "estimated_minutes": sub_est,
                "suggested": is_suggested,
            })

        saved_count = sum(1 for s in all_subtasks if not s["suggested"])
        await push.send_task_added(user_id, row["title"], step_count=saved_count)

        parsed_tasks.append({
            "task_id": str(row["id"]),
            "title": row["title"],
            "description": row["description"],
            "priority": row["priority"],
            "deadline": row["deadline"],
            "estimated_minutes": row["estimated_minutes"],
            "tags": row["tags"] or [],
            "subtasks": all_subtasks,
        })

    return BrainDumpResponse(
        parsed_tasks=parsed_tasks,
        unparseable_fragments=result.get("unparseable_fragments", []),
        ask_for_plans=True,
    )
|
||||
|
||||
|
||||
@router.post("/{task_id}/plan", response_model=PlanResponse)
async def plan_task(task_id: UUID, req: PlanRequest, user_id: str = Depends(get_current_user_id)):
    """Generate an LLM step plan for a task and persist the steps.

    Raises 404 when the task does not exist or belongs to another user.
    After the steps are saved the task is marked 'ready' with the chosen
    plan type and the user is notified.
    """
    pool = await get_pool()

    task = await pool.fetchrow(
        "SELECT id, title, description, estimated_minutes FROM tasks WHERE id = $1 AND user_id = $2::uuid",
        task_id,
        user_id,
    )
    if task is None:
        raise HTTPException(status_code=404, detail="Task not found")

    generated = await llm.generate_step_plan(
        task["title"], task["description"], task["estimated_minutes"]
    )

    saved_steps: list[StepOut] = []
    for spec in generated:
        inserted = await pool.fetchrow(
            """INSERT INTO steps (task_id, sort_order, title, description, estimated_minutes)
               VALUES ($1, $2, $3, $4, $5)
               RETURNING id, task_id, sort_order, title, description, estimated_minutes,
                         status, checkpoint_note, last_checked_at, completed_at, created_at""",
            task_id,
            spec["sort_order"],
            spec["title"],
            spec.get("description"),
            spec.get("estimated_minutes"),
        )
        saved_steps.append(StepOut(**dict(inserted)))

    await pool.execute(
        "UPDATE tasks SET plan_type = $1, status = 'ready', updated_at = now() WHERE id = $2",
        req.plan_type,
        task_id,
    )
    await push.send_task_added(user_id, task["title"], step_count=len(saved_steps))

    return PlanResponse(task_id=task_id, plan_type=req.plan_type, steps=saved_steps)
|
||||
|
||||
|
||||
@router.patch("/{task_id}", response_model=TaskOut)
async def update_task(task_id: UUID, req: TaskUpdate, user_id: str = Depends(get_current_user_id)):
    """Partially update a task; only fields the client explicitly sent change.

    Raises 400 when the payload contains no fields, 404 when the task does
    not exist or belongs to another user. Column names come from the
    TaskUpdate model, so interpolating them into the SET clause is safe.
    """
    pool = await get_pool()

    changes = req.model_dump(exclude_unset=True)

    # $1 = task_id, $2 = user_id; payload values start at $3.
    assignments = [
        f"{column} = ${position}"
        for position, column in enumerate(changes, start=3)
    ]
    values = list(changes.values())

    if not assignments:
        raise HTTPException(status_code=400, detail="No fields to update")

    assignments.append("updated_at = now()")

    row = await pool.fetchrow(
        f"""UPDATE tasks SET {", ".join(assignments)}
            WHERE id = $1 AND user_id = $2::uuid
            RETURNING {TASK_COLUMNS}""",
        task_id,
        user_id,
        *values,
    )
    if not row:
        raise HTTPException(status_code=404, detail="Task not found")

    return _row_to_task(row)
|
||||
|
||||
|
||||
@router.delete("/{task_id}", status_code=204)
async def delete_task(task_id: UUID, user_id: str = Depends(get_current_user_id)):
    """Soft-delete a task by marking it 'deferred' instead of removing the row."""
    pool = await get_pool()
    status_tag = await pool.execute(
        "UPDATE tasks SET status = 'deferred', updated_at = now() WHERE id = $1 AND user_id = $2::uuid",
        task_id,
        user_id,
    )
    # asyncpg returns a command tag like "UPDATE 1"; zero rows updated
    # means no matching task owned by this user.
    if status_tag == "UPDATE 0":
        raise HTTPException(status_code=404, detail="Task not found")
|
||||
Reference in New Issue
Block a user