Files
MusicAnalyser/backend/app/main.py
bnair123 272148c5bf feat: migrate to PostgreSQL and enhance playlist curation
- Migrate database from SQLite to PostgreSQL (100.91.248.114:5433)
- Fix playlist curation to use actual top tracks instead of AI name matching
- Add /playlists/history endpoint for historical playlist viewing
- Add Playlist Archives section to frontend with expandable history
- Add playlist-modify-* scopes to Spotify OAuth for playlist creation
- Rewrite Genius client to use official API (fixes 403 scraping blocks)
- Ensure playlists are created on Spotify before curation attempts
- Add DATABASE.md documentation for PostgreSQL schema
- Add migrations for PlaylistConfig and composition storage
2025-12-30 22:24:56 +04:00

370 lines
12 KiB
Python

import os
from fastapi import FastAPI, Depends, HTTPException, BackgroundTasks, Query
from sqlalchemy.orm import Session, joinedload
from datetime import datetime, timedelta
from typing import List, Optional
from dotenv import load_dotenv
from .database import engine, Base, get_db
from .models import (
PlayHistory as PlayHistoryModel,
Track as TrackModel,
AnalysisSnapshot,
PlaylistConfig,
)
from . import schemas
from .ingest import (
ingest_recently_played,
get_spotify_client,
get_reccobeats_client,
get_genius_client,
)
from .services.stats_service import StatsService
from .services.narrative_service import NarrativeService
from .services.playlist_service import PlaylistService
# Load .env before any code reads configuration (e.g. DB URL, playlist IDs).
load_dotenv()
# Create any missing tables from the ORM models at import time.
# NOTE(review): no migration runner is invoked here — assumes schema changes
# are handled by the separate migrations mentioned in the commit message.
Base.metadata.create_all(bind=engine)
from fastapi.middleware.cors import CORSMiddleware
app = FastAPI(title="Music Analyser Backend")
# Allow the local dev frontends (Vite on 5173, prod-ish build on 8991) to call
# this API from the browser.
app.add_middleware(
CORSMiddleware,
allow_origins=["http://localhost:5173", "http://localhost:8991"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
@app.get("/")
def read_root():
    """Health-check endpoint confirming the API is reachable."""
    payload = {"status": "ok", "message": "Music Analyser API is running"}
    return payload
@app.get("/history", response_model=List[schemas.PlayHistory])
def get_history(limit: int = 50, db: Session = Depends(get_db)):
    """Return the most recent plays, newest first, capped at ``limit`` rows."""
    recent_first = db.query(PlayHistoryModel).order_by(
        PlayHistoryModel.played_at.desc()
    )
    return recent_first.limit(limit).all()
@app.get("/tracks", response_model=List[schemas.Track])
def get_tracks(limit: int = 50, db: Session = Depends(get_db)):
    """Return up to ``limit`` tracks; no explicit ordering is applied."""
    query = db.query(TrackModel)
    return query.limit(limit).all()
@app.post("/trigger-ingest")
async def trigger_ingest(
    background_tasks: BackgroundTasks, db: Session = Depends(get_db)
):
    """Triggers Spotify ingestion in the background."""
    # The task runs after the response is sent, so this endpoint returns
    # immediately regardless of how long ingestion takes.
    background_tasks.add_task(ingest_recently_played, db)
    response = {"status": "Ingestion started in background"}
    return response
@app.post("/trigger-analysis")
def trigger_analysis(
    days: int = 30,
    model_name: str = "gpt-5-mini-2025-08-07",
    db: Session = Depends(get_db),
):
    """
    Runs the full analysis pipeline (Stats + LLM) for the last X days.
    Returns the computed metrics and narrative immediately.

    Raises 404 when no plays fall in the period, 500 on any other failure.
    """
    try:
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=days)
        # 1. Compute Stats
        stats_service = StatsService(db)
        stats_json = stats_service.generate_full_report(start_date, end_date)
        if stats_json["volume"]["total_plays"] == 0:
            raise HTTPException(
                status_code=404, detail="No plays found in the specified period."
            )
        # 2. Generate the LLM narrative from the computed metrics.
        narrative_service = NarrativeService(model_name=model_name)
        narrative_json = narrative_service.generate_full_narrative(stats_json)
        # 3. Save Snapshot
        snapshot = AnalysisSnapshot(
            period_start=start_date,
            period_end=end_date,
            period_label=f"last_{days}_days",
            metrics_payload=stats_json,
            narrative_report=narrative_json,
            model_used=model_name,
        )
        db.add(snapshot)
        db.commit()
        db.refresh(snapshot)
        return {
            "status": "success",
            "snapshot_id": snapshot.id,
            "period": {"start": start_date, "end": end_date},
            "metrics": stats_json,
            "narrative": narrative_json,
        }
    except HTTPException:
        raise  # Re-raise HTTPExceptions as-is (404, etc.)
    except Exception as e:
        # Fix: roll back the failed transaction so the session is not left in
        # a dirty/failed state for anything else that reuses it.
        db.rollback()
        print(f"Analysis Failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/snapshots")
def get_snapshots(limit: int = 10, db: Session = Depends(get_db)):
    """Return the most recent analysis snapshots, newest first."""
    newest_first = db.query(AnalysisSnapshot).order_by(AnalysisSnapshot.date.desc())
    return newest_first.limit(limit).all()
@app.get("/listening-log")
def get_listening_log(
    days: int = Query(default=7, ge=1, le=365),
    limit: int = Query(default=200, ge=1, le=1000),
    db: Session = Depends(get_db),
):
    """Return recent plays (newest first) with listen-time and skip estimates.

    When a play has no recorded ``listened_ms``, it is estimated from the gap
    to the chronologically adjacent play, capped at the track's duration.
    """
    end_date = datetime.utcnow()
    start_date = end_date - timedelta(days=days)
    plays = (
        db.query(PlayHistoryModel)
        # Eager-load the related track to avoid one extra query per play below.
        .options(joinedload(PlayHistoryModel.track))
        .filter(
            PlayHistoryModel.played_at >= start_date,
            PlayHistoryModel.played_at <= end_date,
        )
        .order_by(PlayHistoryModel.played_at.desc())
        .limit(limit)
        .all()
    )
    result = []
    for i, play in enumerate(plays):
        track = play.track
        listened_ms = play.listened_ms
        skipped = play.skipped
        # Estimate a missing listen time from the gap to the next (older) play.
        # The last (oldest) play in the window has no older neighbor, so it
        # keeps listened_ms=None.
        if listened_ms is None and i < len(plays) - 1:
            next_play = plays[i + 1]
            # NOTE(review): this assumes played_at marks when a play *finished*
            # (Spotify recently-played semantics), so the gap approximates how
            # long this play lasted. If played_at is a start timestamp, the gap
            # measures the older play instead — confirm against the ingest code.
            diff_seconds = (play.played_at - next_play.played_at).total_seconds()
            if track and track.duration_ms:
                duration_sec = track.duration_ms / 1000.0
                # Cap the estimate at the full track duration.
                listened_ms = int(min(diff_seconds, duration_sec) * 1000)
                # Heuristic: under 30 seconds of estimated play counts as a skip.
                skipped = diff_seconds < 30
        result.append(
            {
                "id": play.id,
                "track_id": play.track_id,
                "track_name": track.name if track else "Unknown",
                "artist": track.artist if track else "Unknown",
                "album": track.album if track else "Unknown",
                "image": track.image_url if track else None,
                "played_at": play.played_at.isoformat(),
                "duration_ms": track.duration_ms if track else 0,
                "listened_ms": listened_ms,
                "skipped": skipped,
                "context_uri": play.context_uri,
                "source": play.source,
            }
        )
    return {
        "plays": result,
        "period": {"start": start_date.isoformat(), "end": end_date.isoformat()},
    }
@app.get("/sessions")
def get_sessions(
    days: int = Query(default=7, ge=1, le=365), db: Session = Depends(get_db)
):
    """Return listening sessions plus summary stats for the last ``days`` days."""
    window_end = datetime.utcnow()
    window_start = window_end - timedelta(days=days)
    stats = StatsService(db).compute_session_stats(window_start, window_end)
    summary = {
        "count": stats.get("count", 0),
        "avg_minutes": stats.get("avg_minutes", 0),
        "micro_rate": stats.get("micro_session_rate", 0),
        "marathon_rate": stats.get("marathon_session_rate", 0),
    }
    return {"sessions": stats.get("session_list", []), "summary": summary}
@app.post("/playlists/refresh/six-hour")
async def refresh_six_hour_playlist(db: Session = Depends(get_db)):
    """Triggers a 6-hour themed playlist refresh.

    Curates the playlist from the last 6 hours of listening, records the
    result as an AnalysisSnapshot, and returns the curation payload.
    Raises 500 on any failure.
    """
    try:
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(hours=6)
        spotify_client = get_spotify_client()
        playlist_service = PlaylistService(
            db=db,
            spotify_client=spotify_client,
            recco_client=get_reccobeats_client(),
            narrative_service=NarrativeService(),
        )
        # Ensure playlists exist (creates on Spotify if needed)
        user_id = await spotify_client.get_current_user_id()
        await playlist_service.ensure_playlists_exist(user_id)
        result = await playlist_service.curate_six_hour_playlist(start_date, end_date)
        snapshot = AnalysisSnapshot(
            date=datetime.utcnow(),
            period_start=start_date,
            period_end=end_date,
            period_label="6h_refresh",
            metrics_payload={},
            narrative_report={},
            playlist_theme=result.get("theme_name"),
            playlist_theme_reasoning=result.get("description"),
            six_hour_playlist_id=result.get("playlist_id"),
            playlist_composition=result.get("composition"),
        )
        db.add(snapshot)
        db.commit()
        return result
    except Exception as e:
        # Fix: roll back the failed transaction so the session is usable again.
        db.rollback()
        print(f"Playlist Refresh Failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/playlists/refresh/daily")
async def refresh_daily_playlist(db: Session = Depends(get_db)):
    """Triggers a 24-hour daily playlist refresh.

    Curates the playlist from the last 24 hours of listening, records the
    result as an AnalysisSnapshot, and returns the curation payload.
    Raises 500 on any failure.
    """
    try:
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=1)
        spotify_client = get_spotify_client()
        playlist_service = PlaylistService(
            db=db,
            spotify_client=spotify_client,
            recco_client=get_reccobeats_client(),
            narrative_service=NarrativeService(),
        )
        # Ensure playlists exist (creates on Spotify if needed)
        user_id = await spotify_client.get_current_user_id()
        await playlist_service.ensure_playlists_exist(user_id)
        result = await playlist_service.curate_daily_playlist(start_date, end_date)
        snapshot = AnalysisSnapshot(
            date=datetime.utcnow(),
            period_start=start_date,
            period_end=end_date,
            period_label="24h_refresh",
            metrics_payload={},
            narrative_report={},
            daily_playlist_id=result.get("playlist_id"),
            playlist_composition=result.get("composition"),
        )
        db.add(snapshot)
        db.commit()
        return result
    except Exception as e:
        # Fix: roll back the failed transaction so the session is usable again.
        db.rollback()
        print(f"Daily Playlist Refresh Failed: {e}")
        raise HTTPException(status_code=500, detail=str(e))
def _playlist_meta(config: Optional[PlaylistConfig], env_var: str) -> dict:
    """Shape one managed playlist's metadata, falling back to env/defaults.

    When no config row exists, the Spotify ID is read from ``env_var`` and the
    remaining fields use placeholder values.
    """
    if config is None:
        return {
            "id": os.getenv(env_var),
            "theme": "N/A",
            "reasoning": "N/A",
            "last_refresh": None,
            "composition": [],
        }
    return {
        "id": config.spotify_id,
        "theme": config.current_theme,
        "reasoning": config.description,
        # Fix: last_updated may be NULL for a config that has never been
        # refreshed — calling .isoformat() on it raised AttributeError.
        "last_refresh": config.last_updated.isoformat() if config.last_updated else None,
        "composition": config.composition,
    }


@app.get("/playlists")
async def get_playlists_metadata(db: Session = Depends(get_db)):
    """Returns metadata for the managed playlists."""
    six_hour_config = (
        db.query(PlaylistConfig).filter(PlaylistConfig.key == "six_hour").first()
    )
    daily_config = (
        db.query(PlaylistConfig).filter(PlaylistConfig.key == "daily").first()
    )
    return {
        "six_hour": _playlist_meta(six_hour_config, "SIX_HOUR_PLAYLIST_ID"),
        "daily": _playlist_meta(daily_config, "DAILY_PLAYLIST_ID"),
    }
@app.get("/playlists/history")
def get_playlist_history(
    limit: int = Query(default=20, ge=1, le=100),
    db: Session = Depends(get_db),
):
    """Returns historical playlist snapshots."""
    # A snapshot counts as playlist history if it carries a theme or either
    # managed-playlist ID.
    has_playlist_data = (
        (AnalysisSnapshot.playlist_theme.isnot(None))
        | (AnalysisSnapshot.six_hour_playlist_id.isnot(None))
        | (AnalysisSnapshot.daily_playlist_id.isnot(None))
    )
    snapshots = (
        db.query(AnalysisSnapshot)
        .filter(has_playlist_data)
        .order_by(AnalysisSnapshot.date.desc())
        .limit(limit)
        .all()
    )
    history = [
        {
            "id": snap.id,
            "date": snap.date.isoformat() if snap.date else None,
            "period_label": snap.period_label,
            "theme": snap.playlist_theme,
            "reasoning": snap.playlist_theme_reasoning,
            "six_hour_id": snap.six_hour_playlist_id,
            "daily_id": snap.daily_playlist_id,
            "composition": snap.playlist_composition or [],
        }
        for snap in snapshots
    ]
    return {"history": history}