Mirror of https://github.com/bnair123/MusicAnalyser.git (synced 2026-02-25 19:56:06 +00:00).
- Refactor Database: add `Artist` model, M2M relationship, and `AnalysisSnapshot` model.
- Backend Services: implement `StatsService` for computable metrics and `NarrativeService` for Gemini LLM integration.
- Fix Ingestion: correctly handle multiple artists per track and backfill existing data.
- Testing: add unit tests for statistics logic and live verification scripts.
- Documentation: add `PHASE_4_FRONTEND_GUIDE.md`.
83 lines
2.7 KiB
Python
83 lines
2.7 KiB
Python
import json
import os
import sys
from datetime import datetime, timedelta, timezone

from app.database import SessionLocal
from app.models import AnalysisSnapshot
from app.services.narrative_service import NarrativeService
from app.services.stats_service import StatsService
|
|
|
|
def run_analysis_pipeline(days: int = 30, model_name: str = "gemini-2.5-flash"):
    """Run the full listening-analysis pipeline for the last *days* days.

    Steps:
        1. Compute listening metrics with ``StatsService``.
        2. Generate an LLM narrative with ``NarrativeService``.
        3. Persist an ``AnalysisSnapshot`` row.
        4. Dump the combined report to ``latest_analysis.json``.

    Args:
        days: Size of the analysis window, ending at the current time.
        model_name: Gemini model identifier handed to ``NarrativeService``.

    Returns:
        None. All results are reported via stdout, the database, and the
        JSON file; failures are printed rather than raised (script boundary).
    """
    db = SessionLocal()
    try:
        # Timezone-aware "now": datetime.utcnow() is deprecated since
        # Python 3.12 and yields naive datetimes.
        # NOTE(review): the columns previously received naive UTC values —
        # confirm the DB layer accepts aware datetimes.
        end_date = datetime.now(timezone.utc)
        start_date = end_date - timedelta(days=days)

        print(f"--- Starting Analysis for period: {start_date} to {end_date} ---")

        # 1. Compute Stats
        print("Calculating metrics...")
        stats_service = StatsService(db)
        stats_json = stats_service.generate_full_report(start_date, end_date)

        # Empty window: the LLM would have nothing to narrate and there is
        # no snapshot worth persisting, so bail out early.
        if stats_json["volume"]["total_plays"] == 0:
            print("No plays found in this period. Skipping LLM analysis.")
            return

        print(f"Stats computed. Total Plays: {stats_json['volume']['total_plays']}")
        top_artists = stats_json["volume"]["top_artists"]
        print(f"Top Artist: {top_artists[0]['name'] if top_artists else 'N/A'}")

        # 2. Generate Narrative
        print(f"Generating Narrative with {model_name}...")
        narrative_service = NarrativeService(model_name=model_name)
        narrative_json = narrative_service.generate_narrative(stats_json)

        if "error" in narrative_json:
            # The error payload is deliberately still persisted below, so
            # LLM failures remain visible in the snapshot history.
            print(f"LLM Error: {narrative_json['error']}")
        else:
            print("Narrative generated successfully.")
            print(f"Persona: {narrative_json.get('persona')}")

        # 3. Save Snapshot
        print("Saving snapshot to database...")
        snapshot = AnalysisSnapshot(
            period_start=start_date,
            period_end=end_date,
            period_label=f"last_{days}_days",
            metrics_payload=stats_json,
            narrative_report=narrative_json,
            model_used=model_name
        )
        db.add(snapshot)
        db.commit()
        print(f"Snapshot saved with ID: {snapshot.id}")

        # 4. Output to file for easy inspection. Explicit UTF-8 avoids the
        # platform-dependent default encoding (e.g. cp1252 on Windows).
        output = {
            "snapshot_id": snapshot.id,
            "metrics": stats_json,
            "narrative": narrative_json
        }
        with open("latest_analysis.json", "w", encoding="utf-8") as f:
            json.dump(output, f, indent=2)
        print("Full report saved to latest_analysis.json")

    except Exception as e:
        # Top-level script boundary: report with traceback, never re-raise.
        print(f"Pipeline Failed: {e}")
        import traceback
        traceback.print_exc()
    finally:
        # Always release the session, even on early return or failure.
        db.close()
|
|
|
|
if __name__ == "__main__":
    # Optional first CLI argument: analysis window in days (default 30).
    days = 30
    if len(sys.argv) > 1:
        try:
            days = int(sys.argv[1])
        except ValueError:
            # Narrowed from a bare `except`: a non-numeric argument keeps
            # the default instead of being silently swallowed.
            print(f"Ignoring non-integer days argument: {sys.argv[1]!r}")

    run_analysis_pipeline(days=days)
|