Files
MusicAnalyser/backend/app/database.py
bnair123 272148c5bf feat: migrate to PostgreSQL and enhance playlist curation
- Migrate database from SQLite to PostgreSQL (100.91.248.114:5433)
- Fix playlist curation to use actual top tracks instead of AI name matching
- Add /playlists/history endpoint for historical playlist viewing
- Add Playlist Archives section to frontend with expandable history
- Add playlist-modify-* scopes to Spotify OAuth for playlist creation
- Rewrite Genius client to use official API (fixes 403 scraping blocks)
- Ensure playlists are created on Spotify before curation attempts
- Add DATABASE.md documentation for PostgreSQL schema
- Add migrations for PlaylistConfig and composition storage
2025-12-30 22:24:56 +04:00

38 lines
1.2 KiB
Python

import os
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, declarative_base
# PostgreSQL connection configuration.
# Uses docker hostname 'music_db' when running in container, falls back to external IP for local dev
POSTGRES_HOST = os.getenv("POSTGRES_HOST", "music_db")
POSTGRES_PORT = os.getenv("POSTGRES_PORT", "5432")
POSTGRES_USER = os.getenv("POSTGRES_USER", "bnair")
# SECURITY NOTE(review): a real-looking credential is committed here as the
# fallback default and now lives in version control history. Prefer failing
# fast when POSTGRES_PASSWORD is unset — confirm no deployment depends on
# this default before removing it, and rotate the password regardless.
POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD", "Bharath2002")
POSTGRES_DB = os.getenv("POSTGRES_DB", "music_db")

# Build the PostgreSQL URL. A fully-formed DATABASE_URL environment variable
# takes precedence over the individual POSTGRES_* settings above.
# Format: postgresql://user:password@host:port/database
# NOTE(review): the password is interpolated unescaped; if it ever contains
# URL-special characters (@, /, :) the URL breaks — would need
# urllib.parse.quote_plus. Confirm whether that can occur.
SQLALCHEMY_DATABASE_URL = os.getenv(
    "DATABASE_URL",
    f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@{POSTGRES_HOST}:{POSTGRES_PORT}/{POSTGRES_DB}",
)

# Engine with PostgreSQL connection-pool settings for production.
# create_engine does not connect eagerly; connections are opened lazily on
# first use, so importing this module does not require the DB to be up.
engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    pool_size=5,        # Maintain 5 connections in the pool
    max_overflow=10,    # Allow up to 10 additional connections beyond the pool
    pool_pre_ping=True, # Verify connection health before handing one out
)

# Session factory: explicit commit/flush only, bound to the engine above.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Declarative base class for ORM models defined elsewhere in the app.
Base = declarative_base()
def get_db():
    """Yield a SQLAlchemy session, guaranteeing it is closed afterwards.

    Intended for use as a FastAPI-style dependency: the caller receives an
    open session and, once the request is finished, control returns here so
    the session is released back to the connection pool.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        # Runs whether the request succeeded or raised — never leak a
        # pooled connection.
        session.close()