Files
MusicAnalyser/backend/app/main.py
2025-12-25 17:48:41 +04:00

94 lines
3.3 KiB
Python

from fastapi import FastAPI, Depends, HTTPException, BackgroundTasks
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
from typing import List, Optional
from dotenv import load_dotenv
from .database import engine, Base, get_db
from .models import PlayHistory as PlayHistoryModel, Track as TrackModel, AnalysisSnapshot
from . import schemas
from .ingest import ingest_recently_played
from .services.stats_service import StatsService
from .services.narrative_service import NarrativeService
# Load environment variables (e.g. API keys, DB URL) from a .env file
# before any service touches configuration.
load_dotenv()
# Create tables
# Creates all ORM tables at import time if they don't already exist.
# NOTE(review): fine for development; production schemas are usually managed
# via migrations (e.g. Alembic) — confirm intent.
Base.metadata.create_all(bind=engine)
app = FastAPI(title="Music Analyser Backend")
@app.get("/")
def read_root():
    """Liveness endpoint: reports that the API is up and reachable."""
    payload = {"status": "ok", "message": "Music Analyser API is running"}
    return payload
@app.get("/history", response_model=List[schemas.PlayHistory])
def get_history(limit: int = 50, db: Session = Depends(get_db)):
    """Return the most recent plays, newest first, capped at `limit` rows."""
    recent_plays = (
        db.query(PlayHistoryModel)
        .order_by(PlayHistoryModel.played_at.desc())
        .limit(limit)
    )
    return recent_plays.all()
@app.get("/tracks", response_model=List[schemas.Track])
def get_tracks(limit: int = 50, db: Session = Depends(get_db)):
    """Return up to `limit` tracks (no explicit ordering is applied)."""
    return db.query(TrackModel).limit(limit).all()
@app.post("/trigger-ingest")
async def trigger_ingest(background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
    """Triggers Spotify ingestion in the background."""
    # The task runs AFTER the response is sent.
    # NOTE(review): the request-scoped session from get_db is handed to a task
    # that executes post-response; depending on get_db's teardown, the session
    # may already be closed by then — confirm get_db's lifecycle, or have
    # ingest_recently_played open its own session.
    background_tasks.add_task(ingest_recently_played, db)
    return {"status": "Ingestion started in background"}
@app.post("/trigger-analysis")
def trigger_analysis(
    days: int = 30,
    model_name: str = "gemini-2.5-flash",
    db: Session = Depends(get_db)
):
    """
    Runs the full analysis pipeline (Stats + LLM) for the last X days.
    Returns the computed metrics and narrative immediately.

    Raises:
        HTTPException 404: no plays recorded in the requested window.
        HTTPException 500: any other failure in stats, LLM, or persistence.
    """
    try:
        # NOTE(review): utcnow() is naive (and deprecated in Python 3.12);
        # kept as-is because the DB presumably stores naive UTC timestamps —
        # confirm before switching to datetime.now(timezone.utc).
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=days)

        # 1. Compute stats for the window.
        stats_service = StatsService(db)
        stats_json = stats_service.generate_full_report(start_date, end_date)
        if stats_json["volume"]["total_plays"] == 0:
            raise HTTPException(status_code=404, detail="No plays found in the specified period.")

        # 2. Generate the LLM narrative from the computed stats.
        narrative_service = NarrativeService(model_name=model_name)
        narrative_json = narrative_service.generate_narrative(stats_json)

        # 3. Save a snapshot so past analyses can be retrieved later.
        snapshot = AnalysisSnapshot(
            period_start=start_date,
            period_end=end_date,
            period_label=f"last_{days}_days",
            metrics_payload=stats_json,
            narrative_report=narrative_json,
            model_used=model_name
        )
        db.add(snapshot)
        db.commit()
        db.refresh(snapshot)

        return {
            "status": "success",
            "snapshot_id": snapshot.id,
            "period": {"start": start_date, "end": end_date},
            "metrics": stats_json,
            "narrative": narrative_json
        }
    except HTTPException:
        # Bug fix: the broad handler below previously caught the 404 raised
        # above and converted it into a 500. Let FastAPI handle it unchanged.
        raise
    except Exception as e:
        # Leave the session usable after a failed flush/commit.
        db.rollback()
        print(f"Analysis Failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/snapshots")
def get_snapshots(limit: int = 10, db: Session = Depends(get_db)):
    """Retrieve past analysis snapshots, most recently created first."""
    # Bug fix: no `date` attribute on AnalysisSnapshot is visible in this
    # module (snapshots are created with period_start/period_end/etc.), so
    # `AnalysisSnapshot.date.desc()` would raise AttributeError at request
    # time. Order by the primary key instead, which tracks creation order.
    # TODO(review): confirm against the model definition in models.py.
    return (
        db.query(AnalysisSnapshot)
        .order_by(AnalysisSnapshot.id.desc())
        .limit(limit)
        .all()
    )