mirror of
https://github.com/bnair123/MusicAnalyser.git
synced 2026-02-25 11:46:07 +00:00
Fixed and added all the stats_service.py methods
This commit is contained in:
@@ -1,12 +1,15 @@
|
||||
from fastapi import FastAPI, Depends
|
||||
from fastapi import FastAPI, Depends, HTTPException, BackgroundTasks
|
||||
from sqlalchemy.orm import Session
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Optional
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from .database import engine, Base, get_db
|
||||
from .models import PlayHistory as PlayHistoryModel, Track as TrackModel
|
||||
from .models import PlayHistory as PlayHistoryModel, Track as TrackModel, AnalysisSnapshot
|
||||
from . import schemas
|
||||
from .ingest import ingest_recently_played
|
||||
import asyncio
|
||||
from typing import List
|
||||
from dotenv import load_dotenv
|
||||
from .services.stats_service import StatsService
|
||||
from .services.narrative_service import NarrativeService
|
||||
|
||||
load_dotenv()
|
||||
|
||||
@@ -24,13 +27,68 @@ def get_history(limit: int = 50, db: Session = Depends(get_db)):
|
||||
history = db.query(PlayHistoryModel).order_by(PlayHistoryModel.played_at.desc()).limit(limit).all()
|
||||
return history
|
||||
|
||||
@app.post("/trigger-ingest")
async def trigger_ingest(db: Session = Depends(get_db)):
    """Run the recently-played ingestion inline and report completion.

    Awaits the ingest coroutine directly (rather than scheduling it), so the
    HTTP response is only sent once ingestion has finished — handy for tests.
    """
    await ingest_recently_played(db)
    return {"status": "Ingestion triggered"}
|
||||
|
||||
@app.get("/tracks", response_model=List[schemas.Track])
def get_tracks(limit: int = 50, db: Session = Depends(get_db)):
    """Return up to ``limit`` tracks from the catalogue (no ordering applied)."""
    return db.query(TrackModel).limit(limit).all()
|
||||
|
||||
@app.post("/trigger-ingest")
async def trigger_ingest(background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
    """Triggers Spotify ingestion in the background."""
    # Hand the coroutine to Starlette's background runner: the response is
    # returned immediately and ingestion proceeds after it is sent.
    background_tasks.add_task(ingest_recently_played, db)
    return {"status": "Ingestion started in background"}
|
||||
|
||||
@app.post("/trigger-analysis")
def trigger_analysis(
    days: int = 30,
    model_name: str = "gemini-2.5-flash",
    db: Session = Depends(get_db)
):
    """
    Runs the full analysis pipeline (Stats + LLM) for the last X days.
    Returns the computed metrics and narrative immediately.

    Args:
        days: Size of the look-back window, in days.
        model_name: LLM identifier handed to NarrativeService.
        db: Injected SQLAlchemy session.

    Raises:
        HTTPException: 404 when the window contains no plays; 500 on any
            other pipeline failure.
    """
    try:
        # NOTE(review): naive UTC timestamps; datetime.utcnow() is deprecated
        # on 3.12+ — switch to datetime.now(timezone.utc) once stored rows
        # are confirmed timezone-aware.
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=days)

        # 1. Compute Stats
        stats_service = StatsService(db)
        stats_json = stats_service.generate_full_report(start_date, end_date)

        if stats_json["volume"]["total_plays"] == 0:
            raise HTTPException(status_code=404, detail="No plays found in the specified period.")

        # 2. Generate Narrative
        narrative_service = NarrativeService(model_name=model_name)
        narrative_json = narrative_service.generate_narrative(stats_json)

        # 3. Save Snapshot
        snapshot = AnalysisSnapshot(
            period_start=start_date,
            period_end=end_date,
            period_label=f"last_{days}_days",
            metrics_payload=stats_json,
            narrative_report=narrative_json,
            model_used=model_name
        )
        db.add(snapshot)
        db.commit()
        db.refresh(snapshot)

        return {
            "status": "success",
            "snapshot_id": snapshot.id,
            "period": {"start": start_date, "end": end_date},
            "metrics": stats_json,
            "narrative": narrative_json
        }

    except HTTPException:
        # Bug fix: the broad handler below previously caught the deliberate
        # 404 above and re-raised it as a 500. Let HTTPExceptions propagate
        # unchanged so FastAPI returns the intended status code.
        raise
    except Exception as e:
        # Bug fix: roll back any half-committed snapshot so the session is
        # usable again instead of being stuck in a failed transaction.
        db.rollback()
        print(f"Analysis Failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.get("/snapshots")
def get_snapshots(limit: int = 10, db: Session = Depends(get_db)):
    """Retrieve past analysis snapshots, newest first.

    Bug fix: the model exposes no ``date`` column (its visible fields are
    period_start / period_end / period_label / metrics_payload /
    narrative_report / model_used), so ``AnalysisSnapshot.date`` would raise
    AttributeError at import of the query. Order by the monotonically
    increasing primary key instead, which sorts by creation order.
    """
    return db.query(AnalysisSnapshot).order_by(AnalysisSnapshot.id.desc()).limit(limit).all()
|
||||
Reference in New Issue
Block a user