Compare commits

...

No commits in common. "a1f33e2046ee2d79d92f46167bbc2aefc0edd64b" and "1d0042dccf2da2b9bf87b92e05ece83280b13aee" have entirely different histories.

18 changed files with 1 addition and 391 deletions

View file

@ -1,4 +0,0 @@
# Credentials for the bundled PostgreSQL service (must match docker-compose.yml).
POSTGRES_USER=favs
POSTGRES_PASSWORD=favs
POSTGRES_DB=favs
# Anthropic API key used by the categorizer; replace the placeholder with a real key.
ANTHROPIC_API_KEY=sk-ant-...

5
.gitignore vendored
View file

@ -1,5 +0,0 @@
# Python bytecode and virtualenv
__pycache__/
*.pyc
.venv/
# Local secrets and OS clutter
.env
.DS_Store

111
README.md
View file

@ -1,111 +1,2 @@
# favs-my
# test-repo
API de bookmarks personales con categorización automática via LLM.
## Stack
- **API:** FastAPI (Python 3.12)
- **DB:** PostgreSQL 16
- **LLM:** Claude (Haiku por defecto)
- **Infra:** Docker Compose
## Setup
```bash
cp .env.example .env
# editar .env con tu ANTHROPIC_API_KEY
docker compose up --build
```
La API queda en `http://localhost:8000`. La DB en el puerto `5433`.
## Uso
### Crear bookmark
```bash
curl -X POST http://localhost:8000/api/bookmarks \
-H "Content-Type: application/json" \
-d '{"title":"FastAPI docs","link":"https://fastapi.tiangolo.com"}'
```
### Listar todos
```bash
curl http://localhost:8000/api/bookmarks
```
### Filtrar por categoría
```bash
curl http://localhost:8000/api/bookmarks?category=python
```
### Obtener uno
```bash
curl http://localhost:8000/api/bookmarks/{id}
```
### Actualizar
```bash
curl -X PUT http://localhost:8000/api/bookmarks/{id} \
-H "Content-Type: application/json" \
-d '{"title":"Nuevo titulo"}'
```
### Eliminar
```bash
curl -X DELETE http://localhost:8000/api/bookmarks/{id}
```
### Categorizar pendientes (LLM)
```bash
curl -X POST http://localhost:8000/api/categorize
```
Toma los bookmarks sin categoría (`category: null`), los envía a Claude y asigna categorías automáticamente.
## Cron
Para categorizar automáticamente cada 30 minutos:
```bash
crontab -e
```
```
*/30 * * * * curl -s -X POST http://localhost:8000/api/categorize
```
## Variables de entorno
| Variable | Default | Descripción |
|---|---|---|
| `DATABASE_URL` | `postgresql+asyncpg://favs:favs@favs-db:5432/favs` | Conexión a PostgreSQL |
| `ANTHROPIC_API_KEY` | — | API key de Anthropic (requerida para categorizar) |
| `CATEGORIZE_MODEL` | `claude-haiku-4-5-20251001` | Modelo a usar para categorización |
## Estructura
```
├── docker-compose.yml
├── .env.example
└── backend/
├── Dockerfile
├── requirements.txt
└── app/
├── main.py # Entrypoint, lifespan, routers
├── config.py # Settings via env vars
├── database.py # Engine y sesión async
├── models.py # Modelo Bookmark (SQLAlchemy)
├── schemas.py # Pydantic schemas
├── categorizer.py # Lógica de categorización con LLM
└── routers/
├── bookmarks.py # CRUD /api/bookmarks
├── categorize.py # POST /api/categorize
└── health.py # GET /api/health
```

View file

@ -1,3 +0,0 @@
# Keep the build context small: caches and repo metadata are not needed in the image.
__pycache__
*.pyc
.git

View file

@ -1,6 +0,0 @@
# Backend image: Python 3.12 slim base with the API code under /app.
FROM python:3.12-slim
WORKDIR /app
# Copy and install requirements first so this layer is cached across code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
# Default command; docker-compose overrides this to add --reload for development.
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

View file

@ -1,47 +0,0 @@
import json
import uuid as _uuid
import anthropic
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import settings
from app.models import Bookmark
# System prompt for the categorizer. The model is told to reply with bare JSON
# (no prose) so categorize_pending can json.loads the response text directly.
SYSTEM_PROMPT = """You categorize bookmarks. Given a list of bookmarks (title + url),
assign each one a short category label (1-2 words, lowercase, e.g. "python", "devops", "design", "news", "ai/ml").
Respond with a JSON array of objects: [{"id": "...", "category": "..."}]
Only return the JSON, nothing else."""
async def categorize_pending(db: AsyncSession) -> int:
    """Label uncategorized bookmarks via the Anthropic API.

    Loads up to 50 bookmarks whose ``category`` IS NULL, sends them (id,
    title, link) to the model configured in ``settings.categorize_model``,
    and writes the returned labels back to the database.

    Args:
        db: Open async SQLAlchemy session; committed on success.

    Returns:
        The number of bookmarks actually updated. Entries whose id does not
        match a loaded bookmark, or whose category is empty, are ignored and
        not counted.

    Raises:
        json.JSONDecodeError: If the model response is not valid JSON even
            after stripping a markdown fence.
    """
    result = await db.execute(
        select(Bookmark).where(Bookmark.category.is_(None)).limit(50)
    )
    bookmarks = result.scalars().all()
    if not bookmarks:
        # Nothing pending: skip the API call entirely.
        return 0
    items = [
        {"id": str(b.id), "title": b.title, "link": b.link} for b in bookmarks
    ]
    # Async client: the blocking-HTTP sync client (anthropic.Anthropic) would
    # stall the event loop — and every other request — for the whole API call.
    client = anthropic.AsyncAnthropic(api_key=settings.anthropic_api_key)
    response = await client.messages.create(
        model=settings.categorize_model,
        max_tokens=1024,
        system=SYSTEM_PROMPT,
        messages=[{"role": "user", "content": json.dumps(items)}],
    )
    raw = response.content[0].text.strip()
    # Models sometimes wrap output in a markdown code fence despite the prompt.
    if raw.startswith("```"):
        raw = raw.removeprefix("```json").removeprefix("```")
        raw = raw.removesuffix("```").strip()
    categories = json.loads(raw)
    lookup = {b.id: b for b in bookmarks}
    updated = 0
    for entry in categories:
        try:
            key = _uuid.UUID(entry["id"])
        except (KeyError, TypeError, ValueError):
            # Skip malformed entries instead of aborting the whole batch.
            continue
        bookmark = lookup.get(key)
        if bookmark and entry.get("category"):
            bookmark.category = entry["category"]
            updated += 1
    await db.commit()
    return updated

View file

@ -1,12 +0,0 @@
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
    """Application configuration resolved from environment variables / .env."""
    # Async SQLAlchemy DSN; default targets the compose service name "favs-db".
    database_url: str = "postgresql+asyncpg://favs:favs@favs-db:5432/favs"
    # Empty by default — the categorizer needs a real key at call time.
    anthropic_api_key: str = ""
    # Claude model id used by categorize_pending.
    categorize_model: str = "claude-haiku-4-5-20251001"
    model_config = {"env_file": ".env"}
# Module-level singleton imported throughout the app.
settings = Settings()

View file

@ -1,13 +0,0 @@
from collections.abc import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from app.config import settings
# Engine and session factory are module-level singletons shared by the app;
# expire_on_commit=False lets committed objects be read after the session ends.
engine = create_async_engine(settings.database_url)
async_session = async_sessionmaker(engine, expire_on_commit=False)
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """FastAPI dependency yielding one async session per request.

    The session is closed by the ``async with`` block when the request ends.
    Two-arg AsyncGenerator (yield type, send type): the single-arg form is
    only accepted by type checkers from Python 3.13 onward.
    """
    async with async_session() as session:
        yield session

View file

@ -1,20 +0,0 @@
from contextlib import asynccontextmanager
from fastapi import FastAPI
from app.database import engine
from app.models import Base
from app.routers import bookmarks, categorize, health
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan: create missing DB tables on startup; no teardown work."""
    async with engine.begin() as conn:
        # create_all is idempotent — only creates tables that don't exist yet.
        await conn.run_sync(Base.metadata.create_all)
    yield
# Application wiring: one FastAPI app plus the three routers.
app = FastAPI(title="Favs API", lifespan=lifespan)
app.include_router(health.router)
app.include_router(bookmarks.router)
app.include_router(categorize.router)

View file

@ -1,21 +0,0 @@
import uuid
from datetime import datetime

from sqlalchemy import DateTime, String, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
class Base(DeclarativeBase):
    """Declarative base shared by all ORM models; used by create_all at startup."""
    pass
class Bookmark(Base):
    """A saved link with an optional LLM-assigned category label."""
    __tablename__ = "bookmarks"
    # Surrogate primary key generated client-side.
    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4
    )
    title: Mapped[str] = mapped_column(String(500), nullable=False)
    link: Mapped[str] = mapped_column(String(2000), nullable=False)
    # NULL until the categorizer assigns a label.
    category: Mapped[str | None] = mapped_column(String(100), nullable=True)
    # Set by the database at insert time. Annotated with Mapped[...] for
    # consistency with the other columns (it was the only unannotated one).
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )

View file

@ -1,65 +0,0 @@
import uuid
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.models import Bookmark
from app.schemas import BookmarkCreate, BookmarkResponse, BookmarkUpdate
router = APIRouter(prefix="/api/bookmarks", tags=["bookmarks"])
@router.get("/", response_model=list[BookmarkResponse])
async def list_bookmarks(
    category: str | None = None, db: AsyncSession = Depends(get_db)
):
    """Return all bookmarks, newest first, optionally filtered by category."""
    stmt = select(Bookmark).order_by(Bookmark.created_at.desc())
    if category:
        stmt = stmt.where(Bookmark.category == category)
    return (await db.execute(stmt)).scalars().all()
@router.get("/{bookmark_id}", response_model=BookmarkResponse)
async def get_bookmark(bookmark_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
    """Fetch a single bookmark by id; 404 if it does not exist."""
    found = (
        await db.execute(select(Bookmark).where(Bookmark.id == bookmark_id))
    ).scalar_one_or_none()
    if found is None:
        raise HTTPException(status_code=404, detail="Bookmark not found")
    return found
@router.post("/", response_model=BookmarkResponse, status_code=201)
async def create_bookmark(data: BookmarkCreate, db: AsyncSession = Depends(get_db)):
    """Persist a new bookmark and return it with its generated fields."""
    new_row = Bookmark(**data.model_dump())
    db.add(new_row)
    await db.commit()
    # Reload so DB-generated values (e.g. created_at) are present in the response.
    await db.refresh(new_row)
    return new_row
@router.put("/{bookmark_id}", response_model=BookmarkResponse)
async def update_bookmark(
    bookmark_id: uuid.UUID, data: BookmarkUpdate, db: AsyncSession = Depends(get_db)
):
    """Apply the provided fields to an existing bookmark; 404 if absent."""
    row = (
        await db.execute(select(Bookmark).where(Bookmark.id == bookmark_id))
    ).scalar_one_or_none()
    if row is None:
        raise HTTPException(status_code=404, detail="Bookmark not found")
    # exclude_unset: only fields the client actually sent are applied.
    changes = data.model_dump(exclude_unset=True)
    for name, new_value in changes.items():
        setattr(row, name, new_value)
    await db.commit()
    await db.refresh(row)
    return row
@router.delete("/{bookmark_id}", status_code=204)
async def delete_bookmark(bookmark_id: uuid.UUID, db: AsyncSession = Depends(get_db)):
    """Remove a bookmark; 404 if it does not exist, 204 on success."""
    victim = (
        await db.execute(select(Bookmark).where(Bookmark.id == bookmark_id))
    ).scalar_one_or_none()
    if victim is None:
        raise HTTPException(status_code=404, detail="Bookmark not found")
    await db.delete(victim)
    await db.commit()

View file

@ -1,13 +0,0 @@
from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession
from app.categorizer import categorize_pending
from app.database import get_db
router = APIRouter(tags=["categorize"])
@router.post("/api/categorize")
async def run_categorize(db: AsyncSession = Depends(get_db)):
    """Run the LLM categorizer over uncategorized bookmarks; report the count."""
    return {"categorized": await categorize_pending(db)}

View file

@ -1,8 +0,0 @@
from fastapi import APIRouter
router = APIRouter(tags=["health"])
@router.get("/api/health")
async def health():
    """Liveness probe: always reports the service as up."""
    payload = {"status": "ok"}
    return payload

View file

@ -1,25 +0,0 @@
import uuid
from datetime import datetime
from pydantic import BaseModel
class BookmarkCreate(BaseModel):
    """Request body for POST /api/bookmarks; category is assigned later by the LLM."""
    title: str
    link: str
class BookmarkUpdate(BaseModel):
    """Partial-update body for PUT; fields left unset are not modified."""
    title: str | None = None
    link: str | None = None
    category: str | None = None
class BookmarkResponse(BaseModel):
    """Serialized Bookmark row as returned by the API."""
    id: uuid.UUID
    title: str
    link: str
    category: str | None
    created_at: datetime
    # from_attributes: allow construction directly from ORM objects.
    model_config = {"from_attributes": True}

View file

@ -1,6 +0,0 @@
# Pinned runtime dependencies for the FastAPI backend.
fastapi==0.115.6
uvicorn[standard]==0.34.0
sqlalchemy[asyncio]==2.0.36
asyncpg==0.30.0
pydantic-settings==2.7.1
anthropic==0.43.0

View file

@ -1,33 +0,0 @@
# Local development stack: PostgreSQL plus the FastAPI backend.
services:
  favs-db:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: favs
      POSTGRES_PASSWORD: favs
      POSTGRES_DB: favs
    ports:
      # Exposed on host port 5433 to avoid clashing with a local PostgreSQL on 5432.
      - "5433:5432"
    volumes:
      - favs_pgdata:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U favs"]
      interval: 5s
      timeout: 3s
      retries: 5
  favs-api:
    build: ./backend
    ports:
      - "8000:8000"
    environment:
      DATABASE_URL: postgresql+asyncpg://favs:favs@favs-db:5432/favs
      # Passed through from the host environment / .env file.
      ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY}
    depends_on:
      favs-db:
        # Wait until pg_isready succeeds before starting the API.
        condition: service_healthy
    volumes:
      # Bind-mount the source so uvicorn --reload picks up code changes.
      - ./backend:/app
    command: uvicorn app.main:app --host 0.0.0.0 --port 8000 --reload
volumes:
  favs_pgdata: