mirror of
https://github.com/bnair123/MusicAnalyser.git
synced 2026-02-25 11:46:07 +00:00
Created the FastAPI backend structure. Implemented Spotify Recently Played ingestion logic. Set up a SQLite database with SQLAlchemy models. Added an AI service using Google Gemini. Created helper scripts for auth and the background worker. Added a Dockerfile and a GitHub Actions workflow.
37 lines
1.2 KiB
Python
37 lines
1.2 KiB
Python
from fastapi import FastAPI, Depends
|
|
from sqlalchemy.orm import Session
|
|
from .database import engine, Base, get_db
|
|
from .models import PlayHistory as PlayHistoryModel, Track as TrackModel
|
|
from . import schemas
|
|
from .ingest import ingest_recently_played
|
|
import asyncio
|
|
from typing import List
|
|
from dotenv import load_dotenv
|
|
|
|
# Load environment variables (e.g. Spotify / Gemini credentials) from a local
# .env file before any code below reads os.environ.
load_dotenv()

# Create tables
# Creates every ORM-mapped table if it does not already exist. Acceptable for
# a SQLite dev setup; NOTE(review): a migration tool such as Alembic would be
# preferable once the schema starts changing.
Base.metadata.create_all(bind=engine)

# Application instance picked up by the ASGI server (e.g. `uvicorn main:app`).
app = FastAPI(title="Music Analyser Backend")
|
@app.get("/")
def read_root():
    """Health-check endpoint confirming the API process is alive."""
    payload = {"status": "ok", "message": "Music Analyser API is running"}
    return payload
|
|
|
|
@app.get("/history", response_model=List[schemas.PlayHistory])
def get_history(limit: int = 50, db: Session = Depends(get_db)):
    """Return the most recent play-history rows, newest first.

    `limit` caps the number of rows returned (default 50).
    """
    query = (
        db.query(PlayHistoryModel)
        .order_by(PlayHistoryModel.played_at.desc())
        .limit(limit)
    )
    return query.all()
|
|
|
|
@app.post("/trigger-ingest")
async def trigger_ingest(db: Session = Depends(get_db)):
    """Run one Spotify recently-played ingestion pass on demand.

    Useful for testing: invokes the same routine the background worker runs.
    """
    await ingest_recently_played(db)
    result = {"status": "Ingestion triggered"}
    return result
|
|
|
|
@app.get("/tracks", response_model=List[schemas.Track])
def get_tracks(limit: int = 50, db: Session = Depends(get_db)):
    """List up to `limit` stored tracks (no explicit ordering is applied)."""
    rows = db.query(TrackModel).limit(limit).all()
    return rows
|