Refactored everything
This commit is contained in:
444
src/smart_encoder.py
Normal file
444
src/smart_encoder.py
Normal file
@@ -0,0 +1,444 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Smart Video Encoder - AV1 with HEVC Fallback
|
||||
Runs on Windows with native ab-av1.exe and ffmpeg
|
||||
Refactored to use vmaf_common.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import json
|
||||
import shutil
|
||||
import time
|
||||
import argparse
|
||||
import signal
|
||||
import platform
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
import vmaf_common as common
|
||||
|
||||
# --- Configuration ---
|
||||
TARGET_VMAF_MIN = common.DEFAULT_CONFIG["target_vmaf"]
|
||||
MIN_SAVINGS_PERCENT = common.DEFAULT_CONFIG["min_savings"]
|
||||
MAX_JOBS = common.DEFAULT_CONFIG["cpu_jobs"]
|
||||
|
||||
AV1_QUALITY = common.DEFAULT_CONFIG["av1_crf"]
|
||||
HEVC_QUALITY = common.DEFAULT_CONFIG["hevc_crf"]
|
||||
|
||||
SAMPLE_DURATION = 60
|
||||
SAMPLE_START = 300
|
||||
MAX_SAMPLE_SIZE_MB = 150
|
||||
|
||||
TEMP_DIR = common.get_temp_dir()
|
||||
|
||||
# Tools
|
||||
FFMPEG_BIN = "ffmpeg"
|
||||
AB_AV1_BIN = "ab-av1"
|
||||
|
||||
# Global state
|
||||
_shutdown_requested = False
|
||||
_lock_files = {}
|
||||
|
||||
def signal_handler(signum, frame):
    """Signal hook: request a graceful stop instead of dying mid-encode.

    Sets the module-level shutdown flag; workers poll it and finish
    their current task before exiting.
    """
    global _shutdown_requested
    _shutdown_requested = True
    print("\n\n[WARNING] Shutdown requested. Finishing current tasks...")


# Handlers are only registered on POSIX hosts: Windows delivers
# Ctrl+C differently and terminates the process on its own.
if platform.system() != "Windows":
    for _sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(_sig, signal_handler)
|
||||
|
||||
def run_command_streaming(cmd, description=""):
    """Run *cmd*, echoing each output line as it arrives.

    stderr is merged into stdout so encoder progress is visible live.
    Terminates the child early when a shutdown has been requested.
    Returns (returncode, combined_output).
    """
    print(f" ▶ Running: {description}")

    child = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
        bufsize=1,
    )

    collected = []
    if child.stdout:
        for raw in child.stdout:
            if _shutdown_requested:
                child.terminate()
                break
            stripped = raw.rstrip()
            print(f" {stripped}")
            collected.append(stripped)

    child.wait()
    return child.returncode, "\n".join(collected)
|
||||
|
||||
|
||||
def test_av1_sample(filepath, output_path):
    """Encode a short sample with SVT-AV1 to gauge compressibility.

    Seeks to SAMPLE_START and encodes SAMPLE_DURATION seconds at the
    configured AV1 CRF, copying audio/subtitle streams untouched.
    Returns True when ffmpeg exits cleanly and the sample file exists.
    """
    print(f" 🧪 Testing AV1 with {SAMPLE_DURATION}s sample...")

    cmd = [
        FFMPEG_BIN,  # use the module's configured binary, not a hard-coded name
        "-hide_banner",
        "-loglevel", "warning",
        "-ss", str(SAMPLE_START),  # seek before -i = fast input seeking
        "-i", str(filepath),
        "-t", str(SAMPLE_DURATION),
        "-c:v", "libsvtav1",
        "-crf", str(AV1_QUALITY),
        "-preset", "6",
        "-c:a", "copy",
        "-c:s", "copy",
        "-y",
        str(output_path)
    ]

    returncode, output = run_command_streaming(cmd, "AV1 sample test")
    return returncode == 0 and output_path.exists()
|
||||
|
||||
|
||||
def encode_av1_full(filepath, output_path):
    """Run a full AV1 encode of *filepath* via ab-av1.

    Audio is stream-copied.  Returns True when ab-av1 exits cleanly.
    """
    print(" 🎬 Full AV1 encode...")

    cmd = [
        AB_AV1_BIN, "encode",  # use the module's configured binary, not a hard-coded name
        "-i", str(filepath),
        "-o", str(output_path),
        "--crf", str(AV1_QUALITY),
        "--preset", "6",
        "--acodec", "copy"
    ]

    returncode, _output = run_command_streaming(cmd, "AV1 full encode")
    return returncode == 0
|
||||
|
||||
|
||||
def encode_hevc_full(filepath, output_path):
    """Run a full HEVC (x265) encode of *filepath* via ffmpeg.

    Audio and subtitles are stream-copied.  Returns True on success.
    """
    print(" 🎬 Full HEVC encode...")

    cmd = [
        FFMPEG_BIN,  # use the module's configured binary, not a hard-coded name
        "-hide_banner",
        "-loglevel", "warning",
        "-i", str(filepath),
        "-c:v", "libx265",
        "-crf", str(HEVC_QUALITY),
        "-preset", "medium",
        "-c:a", "copy",
        "-c:s", "copy",
        "-y",
        str(output_path)
    ]

    returncode, _output = run_command_streaming(cmd, "HEVC full encode")
    return returncode == 0
|
||||
|
||||
|
||||
def _check_encode_result(temp_output, input_size, codec_name):
    """Validate a finished encode against the original size.

    Returns (final_size, final_savings) when the encode is strictly
    smaller than the original; otherwise deletes temp_output and
    returns None (also when metadata could not be read).
    """
    metadata_after = common.get_video_info(temp_output)
    if metadata_after:
        final_size = metadata_after['size']
        final_savings = (1 - final_size / input_size) * 100
        if final_size < input_size:
            print(f" ✅ {codec_name.upper()} SUCCESS: Saved {final_savings:.1f}%")
            return final_size, final_savings
        print(f" ❌ {codec_name.upper()} FAILED: Larger than original")
    if temp_output.exists():
        temp_output.unlink()
    return None


def process_file(filepath, log_dir, log_category, lock_dir):
    """Process a single video file with AV1→HEVC fallback.

    Phases: (1) AV1 sample test to estimate whole-file savings,
    (2) full encode with AV1 or, failing that, HEVC, (3) atomic
    replacement of the original via a backup with a size-integrity
    check before the backup is discarded.

    Returns True on success or deliberate skip, False on error.

    Fixes over the previous revision: the triplicated HEVC
    result-validation logic is deduplicated into _check_encode_result,
    and the sample clip is now removed in a `finally` so it cannot
    leak when the sample test raises.
    """
    filepath = Path(filepath)
    filename = filepath.name

    # Cross-worker lock on shared storage so two processes never touch
    # the same file.
    lock_file = common.acquire_lock(lock_dir, filepath)
    if not lock_file:
        print(f" 🔒 Skipping (locked): {filename}")
        return True

    try:
        print(f"\n{'='*60}")
        print(f"📁 Processing: {filename}")
        print(f"{'='*60}")

        # Get initial metadata
        metadata_before = common.get_video_info(filepath)
        if not metadata_before:
            print("❌ Could not read metadata, skipping")
            return False

        print(" 📊 Original:")
        print(f"    Codec: {metadata_before['codec']}")
        print(f"    Size: {metadata_before['size'] / (1024**3):.2f} GB")
        print(f"    Bitrate: {metadata_before['bitrate']} kbps")
        print(f"    Duration: {metadata_before['duration'] / 60:.1f} min")

        # Already in a modern codec -> nothing to gain, skip quietly.
        if metadata_before['codec'] in ['av1', 'hevc']:
            print(f" ℹ️ Already optimized ({metadata_before['codec']}), skipping")
            return True

        input_size = metadata_before['size']

        # --- PHASE 1: AV1 SAMPLE TEST ---
        sample_output = TEMP_DIR / f"{filepath.stem}.sample.mkv"
        use_av1 = True

        try:
            if test_av1_sample(filepath, sample_output):
                sample_size = sample_output.stat().st_size
                sample_size_mb = sample_size / (1024 * 1024)
                print(f" 📏 Sample size: {sample_size_mb:.1f} MB")

                # Extrapolate the sample to the full runtime.
                duration_ratio = metadata_before['duration'] / SAMPLE_DURATION
                estimated_full_size = sample_size * duration_ratio
                estimated_full_gb = estimated_full_size / (1024**3)
                input_gb = input_size / (1024**3)

                print(f" 📈 Estimated full size: {estimated_full_gb:.2f} GB")
                print(f" 📉 Original size: {input_gb:.2f} GB")

                if sample_size_mb > MAX_SAMPLE_SIZE_MB:
                    print(f" ❌ AV1 REJECTED: Sample too large ({sample_size_mb:.1f} MB > {MAX_SAMPLE_SIZE_MB} MB)")
                    use_av1 = False
                elif estimated_full_size >= input_size:
                    print(f" ❌ AV1 REJECTED: Estimated size ({estimated_full_gb:.2f} GB) >= original ({input_gb:.2f} GB)")
                    use_av1 = False
                else:
                    estimated_savings = (1 - estimated_full_size / input_size) * 100
                    print(f" ✅ AV1 PASS: Estimated savings {estimated_savings:.1f}%")
            else:
                print(" ❌ AV1 sample test failed")
                use_av1 = False
        except Exception as e:
            print(f" ❌ AV1 test error: {e}")
            use_av1 = False
        finally:
            # Always remove the sample, even on unexpected errors.
            if sample_output.exists():
                sample_output.unlink()

        # --- PHASE 2: ENCODE ---
        temp_output = TEMP_DIR / f"{filepath.stem}.temp.mkv"
        final_status = "rejected"
        final_codec = None
        final_size = input_size
        final_savings = 0.0

        result = None
        if use_av1:
            if encode_av1_full(filepath, temp_output):
                result = _check_encode_result(temp_output, input_size, "av1")
                if result:
                    final_codec = "av1"
            else:
                print(" ❌ AV1 encode failed, trying HEVC...")

        if result is None:
            # Reached when AV1 was rejected up front, its encode failed,
            # or its output was larger than the original.
            print(" 🔄 Trying HEVC fallback...")
            if encode_hevc_full(filepath, temp_output):
                result = _check_encode_result(temp_output, input_size, "hevc")
                if result:
                    final_codec = "hevc"

        if result:
            final_status = "success"
            final_size, final_savings = result

        # --- PHASE 3: FINALIZE ---
        if final_status == "success":
            # Replace via a backup so a failed copy can be undone.
            if filepath.suffix:
                backup_path = filepath.with_suffix(f"{filepath.suffix}.backup")
            else:
                backup_path = Path(str(filepath) + ".backup")

            shutil.move(str(filepath), str(backup_path))
            shutil.move(str(temp_output), str(filepath))

            # Verify integrity before discarding the backup.
            if Path(filepath).stat().st_size == final_size:
                backup_path.unlink()
                metadata_after = common.get_video_info(filepath)

                common.log_event(log_dir, f"{log_category}.jsonl", {
                    "file": str(filepath),
                    "status": "success",
                    "codec": final_codec,
                    "input_size": input_size,
                    "output_size": final_size,
                    "savings_percent": final_savings,
                    "metadata_before": metadata_before,
                    "metadata_after": metadata_after
                })

                print(f"\n ✅ SUCCESS: Optimized with {final_codec.upper() if final_codec else 'unknown'}")
                print(f" Savings: {final_savings:.1f}% ({input_size / (1024**3):.2f} GB → {final_size / (1024**3):.2f} GB)")
            else:
                print(" ❌ Critical Error: Copied file size mismatch! Restoring backup.")
                shutil.move(str(backup_path), str(filepath))

        else:
            common.log_event(log_dir, "rejected.jsonl", {
                "file": str(filepath),
                "status": "rejected",
                "reason": "both_codecs_larger_than_original",
                "input_size": input_size,
                "metadata": metadata_before
            })

            print("\n ❌ REJECTED: Both AV1 and HEVC larger than original")
            print(f" Keeping original file ({input_size / (1024**3):.2f} GB)")

        return True

    except Exception as e:
        print(f"❌ Error processing {filename}: {e}")
        return False

    finally:
        # Release the shared lock in every exit path.
        if lock_file and lock_file.exists():
            lock_file.unlink()
|
||||
|
||||
def scan_directory(directory, extensions=None):
    """Collect video files under *directory*, sorted by path.

    Prunes underscore-prefixed and known system directories during the
    walk, and skips files whose stem is tagged ``_av1``/``_hevc``
    (already-converted outputs).  Extension matching is
    case-insensitive against *extensions* (default video set).
    """
    if extensions is None:
        extensions = {".mkv", ".mp4", ".mov", ".avi", ".ts"}

    system_dirs = {".recycle", "@eaDir"}
    found = []

    for dirpath, dirnames, filenames in os.walk(directory):
        # In-place prune so os.walk never descends into skipped dirs.
        dirnames[:] = [
            d for d in dirnames
            if not d.startswith("_") and d not in system_dirs
        ]

        for name in filenames:
            candidate = Path(dirpath) / name
            if candidate.suffix.lower() not in extensions:
                continue
            stem = candidate.stem.lower()
            if "_av1" in stem or "_hevc" in stem:
                continue
            found.append(candidate)

    return sorted(found)
|
||||
|
||||
def main():
    """CLI entry point: scan the libraries and encode files in parallel."""
    parser = argparse.ArgumentParser(description="Smart Video Encoder - AV1 with HEVC Fallback")
    parser.add_argument("--tv-dir", default=common.DEFAULT_CONFIG["tv_dir"], help="TV directory")
    parser.add_argument("--content-dir", default=common.DEFAULT_CONFIG["content_dir"], help="Content directory")
    parser.add_argument("--jobs", type=int, default=MAX_JOBS, help="Parallel jobs")
    parser.add_argument("--tv-only", action="store_true", help="Process TV only")
    parser.add_argument("--content-only", action="store_true", help="Process content only")
    args = parser.parse_args()

    rule = "=" * 60
    print(rule)
    print("🎬 Smart Video Encoder - AV1 with HEVC Fallback")
    print(rule)

    # Setup: tools, shared lock/log dirs, local scratch space.
    common.check_dependencies()
    lock_dir, log_dir = common.get_base_paths(args)
    TEMP_DIR.mkdir(parents=True, exist_ok=True)

    print("\n📁 Configuration:")
    print(f" TV Directory: {args.tv_dir}")
    print(f" Content Directory: {args.content_dir}")
    print(f" Parallel Jobs: {args.jobs}")
    print(f" Locks: {lock_dir}")
    print(f" Logs: {log_dir}")
    print()

    tasks = []

    def _enqueue(root_str, category, label):
        # Scan one library root and append its files to the work queue.
        root = Path(root_str)
        if not root.exists():
            return
        media = scan_directory(root)
        print(f"{label}: {len(media)}")
        tasks.extend((m, log_dir, category, lock_dir) for m in media)

    if not args.content_only:
        _enqueue(args.tv_dir, "tv_shows", "📺 TV Files")
    if not args.tv_only:
        _enqueue(args.content_dir, "content", "📦 Content Files")

    if not tasks:
        print("❌ No files to process")
        return

    print(f"\n🚀 Processing {len(tasks)} files...")
    print(rule)

    success_count = 0
    fail_count = 0

    with ThreadPoolExecutor(max_workers=args.jobs) as executor:
        # Map each future back to its source path for error reporting.
        pending = {
            executor.submit(process_file, *task): task[0]
            for task in tasks
        }

        for future in as_completed(pending):
            if _shutdown_requested:
                break

            source = pending[future]
            try:
                if future.result():
                    success_count += 1
                else:
                    fail_count += 1
            except Exception as e:
                print(f"❌ Error processing {source}: {e}")
                fail_count += 1

    print("\n" + rule)
    print("📊 Summary:")
    print(f" Processed: {success_count + fail_count}")
    print(f" ✅ Success: {success_count}")
    print(f" ❌ Failed: {fail_count}")
    print(rule)
|
||||
504
src/smart_gpu_encoder.py
Normal file
504
src/smart_gpu_encoder.py
Normal file
@@ -0,0 +1,504 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Smart GPU Encoder (Windows/AMD/NVIDIA Optimized)
|
||||
------------------------------------------------
|
||||
Refactored to use vmaf_common.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import json
|
||||
import shutil
|
||||
import time
|
||||
import argparse
|
||||
import signal
|
||||
import platform
|
||||
import re
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
# --- Import Common Module ---
|
||||
import vmaf_common as common
|
||||
|
||||
# --- Configuration ---
|
||||
TARGET_VMAF_MIN = common.DEFAULT_CONFIG["target_vmaf"]
|
||||
MIN_SAVINGS_PERCENT = common.DEFAULT_CONFIG["min_savings"]
|
||||
MAX_JOBS = common.DEFAULT_CONFIG["gpu_jobs"]
|
||||
|
||||
DEFAULT_AV1_QP = 32
|
||||
DEFAULT_HEVC_QP = 28
|
||||
|
||||
SAMPLE_DURATION = 60
|
||||
SAMPLE_START_TIME = 300
|
||||
|
||||
TEMP_DIR = None # Will be set in main
|
||||
|
||||
# Tools (Local override if needed, else used from common checks)
|
||||
FFMPEG_BIN = "ffmpeg"
|
||||
AB_AV1_BIN = "ab-av1"
|
||||
|
||||
# Global state
|
||||
shutdown_requested = False
|
||||
active_processes = set()
|
||||
upload_lock = threading.Lock()
|
||||
proc_lock = threading.Lock()
|
||||
debug_mode = False
|
||||
|
||||
def handle_sigint(signum, frame):
    """SIGINT hook: hard-kill all tracked encoder processes and exit.

    Unlike the CPU script's graceful flag, GPU workers are terminated
    immediately (terminate, short grace period, then kill) because a
    partially written hardware encode cannot be resumed.
    """
    global shutdown_requested
    print("\n\n[!] CRITICAL: Shutdown requested (Ctrl+C).")
    print(" Killing active encoder processes...")
    shutdown_requested = True

    with proc_lock:
        for proc in list(active_processes):
            try:
                proc.terminate()
                time.sleep(0.1)
                if proc.poll() is None:
                    proc.kill()
            except OSError:
                # Process already exited; nothing left to kill.
                pass
    print(" Cleanup complete. Exiting.")
    sys.exit(1)
|
||||
|
||||
signal.signal(signal.SIGINT, handle_sigint)
|
||||
|
||||
# --- Hardware Detection ---
|
||||
def detect_hardware_encoder():
    """Probe ffmpeg's encoder list for hardware AV1/HEVC support.

    Vendors are checked in priority order — AMD (AMF), NVIDIA (NVENC),
    Intel (QSV), Apple (VideoToolbox) — and the first vendor with any
    match wins.  Returns (av1_encoder, hevc_encoder, hw_type); on any
    failure or no match, (None, None, "cpu").
    """
    vendor_table = (
        ("amf", "av1_amf", "hevc_amf"),
        ("nvenc", "av1_nvenc", "hevc_nvenc"),
        ("qsv", "av1_qsv", "hevc_qsv"),
        ("videotoolbox", "av1_videotoolbox", "hevc_videotoolbox"),
    )

    try:
        probe = subprocess.run(
            [FFMPEG_BIN, "-hide_banner", "-encoders"],
            capture_output=True, text=True,
        )
        listing = probe.stdout

        for hw_type, av1_name, hevc_name in vendor_table:
            av1_found = av1_name if av1_name in listing else None
            hevc_found = hevc_name if hevc_name in listing else None
            if av1_found or hevc_found:
                return av1_found, hevc_found, hw_type

        return None, None, "cpu"

    except Exception as e:
        print(f"[Warning] HW Detection failed: {e}")
        return None, None, "cpu"
|
||||
|
||||
def get_encoder_args(codec, encoder, qp):
    """Build the ffmpeg video-codec argument list for a vendor encoder.

    The vendor is derived from the encoder name itself (the *codec*
    parameter is kept for interface compatibility).  Unknown or empty
    encoders yield an empty list.
    """
    if not encoder:
        return []

    qp_s = str(qp)

    if "amf" in encoder:
        # AMD AMF: constant QP on I/P/B frames, quality-biased preset.
        return [
            "-c:v", encoder, "-usage", "transcoding",
            "-rc", "cqp", "-qp_i", qp_s, "-qp_p", qp_s, "-qp_b", qp_s,
            "-quality", "quality",
        ]

    if "nvenc" in encoder:
        return ["-c:v", encoder, "-rc", "constqp", "-qp", qp_s,
                "-preset", "p6", "-spatial-aq", "1"]

    if "qsv" in encoder:
        return ["-c:v", encoder, "-global_quality", qp_s, "-look_ahead", "1"]

    if "videotoolbox" in encoder:
        # VideoToolbox uses an inverted 0-100 quality scale.
        return ["-c:v", encoder, "-q:v", str(int(100 - (qp * 2)))]

    # Software fallbacks.
    if encoder == "libsvtav1":
        return ["-c:v", "libsvtav1", "-crf", qp_s, "-preset", "6", "-g", "240"]

    if encoder == "libx265":
        return ["-c:v", "libx265", "-crf", qp_s, "-preset", "medium"]

    return []
|
||||
|
||||
# --- Helpers ---
|
||||
def run_process(cmd, description="", status_callback=None):
    """Run *cmd* with live output, registered for clean shutdown.

    The child is added to the module's active_processes set so the
    SIGINT handler can kill it; progress-looking lines are forwarded
    to *status_callback*.  Returns True only on a zero exit status.
    """
    if shutdown_requested:
        return False
    if status_callback:
        status_callback(description)

    try:
        # CREATE_NO_WINDOW on Windows so workers don't spawn consoles.
        creation = 0x08000000 if platform.system() == 'Windows' else 0

        child = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
            bufsize=1,
            creationflags=creation,
        )

        with proc_lock:
            active_processes.add(child)

        if child.stdout:
            for raw in child.stdout:
                if shutdown_requested:
                    child.terminate()
                    break
                text = raw.strip()
                if not text:
                    continue
                if debug_mode:
                    print(f" [Debug] {text}")
                wants_status = status_callback and (
                    "frame=" in text or "size=" in text or "time=" in text
                )
                if wants_status:
                    status_callback(text)

        child.wait()

        with proc_lock:
            active_processes.discard(child)

        return child.returncode == 0
    except Exception as e:
        if status_callback:
            status_callback(f"Error: {e}")
        return False
|
||||
|
||||
def run_vmaf_check(reference, distorted, status_callback=None):
    """Run `ab-av1 vmaf` and return the VMAF score.

    Prefers a bundled bin/ab-av1.exe next to this script over the one
    on PATH.  Returns 0.0 when the tool ran but produced no parsable
    score, and -1.0 when it failed to run (missing or non-zero exit).
    """
    ab_exe = "ab-av1"

    # Check if bundled exists
    bundled = Path(__file__).parent / "bin" / "ab-av1.exe"
    if bundled.exists():
        ab_exe = str(bundled)

    cmd = [ab_exe, "vmaf", "--reference", str(reference), "--distorted", str(distorted)]
    if status_callback:
        status_callback("Calculating VMAF...")

    try:
        result = subprocess.run(cmd, capture_output=True, text=True, check=True)
    except (OSError, subprocess.SubprocessError):
        # Binary missing, not executable, or exited non-zero.
        return -1.0

    for line in result.stdout.splitlines():
        line = line.strip()
        # Preferred form: a line containing "VMAF <score>".
        match = re.search(r"VMAF\s+([0-9.]+)", line)
        if match:
            return float(match.group(1))
        # Fallback: a bare number in the valid VMAF range.
        try:
            val = float(line)
        except ValueError:
            continue
        if 0 <= val <= 100:
            return val
    return 0.0
|
||||
|
||||
# --- Core Logic ---
|
||||
def process_file(filepath, log_category, lock_dir, log_dir, encoders, worker_id=0, status_cb=None):
    """Sample-test, fully encode and safely replace a single video.

    encoders: (av1_encoder, hevc_encoder, hw_type) from hardware
    detection.  status_cb, when given, is called as
    status_cb(worker_id, filename, status_text, color).

    Fixes over the previous revision: sample clips are removed in a
    `finally` so they no longer leak when a test phase raises, and the
    bare `except:` in the progress callback is narrowed.
    """
    av1_enc, hevc_enc, hw_type = encoders
    filepath = Path(filepath)
    filename = filepath.name

    def update(msg, color="white"):
        # Route status either to the dashboard callback or stdout.
        if status_cb: status_cb(worker_id, filename, msg, color)
        else: print(f"[{worker_id}] {msg}")

    if shutdown_requested: return

    # 1. Lock Check (Shared Storage)
    lock_file = common.acquire_lock(lock_dir, filepath)
    if not lock_file:
        return  # Locked or skipped

    try:
        update("Analyzing...", "blue")

        # 2. Analyze Source
        info = common.get_video_info(filepath)
        if not info:
            update("Metadata Error", "red")
            return

        if info["codec"] == "av1":
            update("Already AV1 (Skipping)", "green")
            time.sleep(1)
            return

        # 3. Create Samples
        sample_ref = TEMP_DIR / f"{filepath.stem}_{worker_id}_ref.mkv"
        sample_enc = TEMP_DIR / f"{filepath.stem}_{worker_id}_enc.mkv"

        # Centre the sample in files too short for the default offset.
        sample_start = SAMPLE_START_TIME
        if info["duration"] < (SAMPLE_START_TIME + SAMPLE_DURATION):
            sample_start = max(0, (info["duration"] / 2) - (SAMPLE_DURATION / 2))

        chosen_codec = None
        chosen_qp = 0
        vmaf_score = 0
        savings = 0

        try:
            update("Extracting Ref", "magenta")
            cmd_ref = [
                FFMPEG_BIN, "-y", "-hide_banner", "-loglevel", "error",
                "-ss", str(sample_start), "-t", str(SAMPLE_DURATION),
                "-i", str(filepath), "-c", "copy", "-map", "0:v:0",
                str(sample_ref)
            ]
            if not run_process(cmd_ref):
                update("Extract Ref Failed", "red")
                return

            # TEST 1: AV1
            if av1_enc:
                update(f"Testing AV1 QP{DEFAULT_AV1_QP}", "yellow")
                enc_args = get_encoder_args("av1", av1_enc, DEFAULT_AV1_QP)
                cmd_enc = [
                    FFMPEG_BIN, "-y", "-hide_banner", "-loglevel", "error",
                    "-i", str(sample_ref), *enc_args, "-an", str(sample_enc)
                ]

                if run_process(cmd_enc):
                    update("Calculating VMAF", "cyan")
                    vmaf_score = run_vmaf_check(sample_ref, sample_enc)

                    ref_size = sample_ref.stat().st_size
                    enc_size = sample_enc.stat().st_size
                    savings = (1 - (enc_size / ref_size)) * 100 if ref_size > 0 else 0
                else:
                    update("AV1 Test Failed", "red")

            # Decision Logic
            if vmaf_score >= TARGET_VMAF_MIN and savings >= MIN_SAVINGS_PERCENT:
                chosen_codec = "av1"
                chosen_qp = DEFAULT_AV1_QP
                update(f"AV1 Good (VMAF {vmaf_score:.1f})", "green")

                # Smart Optimization: quality headroom -> try a higher QP.
                if vmaf_score > 97.0:
                    update(f"Optimizing (High Quality {vmaf_score:.1f})", "yellow")
                    new_qp = DEFAULT_AV1_QP + 4
                    args_opt = get_encoder_args("av1", av1_enc, new_qp)
                    cmd_opt = [FFMPEG_BIN, "-y", "-hide_banner", "-loglevel", "error", "-i", str(sample_ref), *args_opt, "-an", str(sample_enc)]

                    if run_process(cmd_opt):
                        vmaf_opt = run_vmaf_check(sample_ref, sample_enc)
                        size_opt = sample_enc.stat().st_size
                        sav_opt = (1 - (size_opt / sample_ref.stat().st_size)) * 100

                        # Only keep the higher QP if quality holds and it saves more.
                        if vmaf_opt >= TARGET_VMAF_MIN and sav_opt > savings:
                            update(f"Opt Accepted (+{sav_opt - savings:.1f}%)", "green")
                            chosen_qp = new_qp
                            vmaf_score = vmaf_opt
                            savings = sav_opt
            else:
                update("Testing HEVC Fallback", "magenta")
                if info["codec"] != "hevc" and hevc_enc:
                    hevc_args = get_encoder_args("hevc", hevc_enc, DEFAULT_HEVC_QP)
                    cmd_hevc = [FFMPEG_BIN, "-y", "-hide_banner", "-loglevel", "error", "-i", str(sample_ref), *hevc_args, "-an", str(sample_enc)]
                    run_process(cmd_hevc)

                    vmaf_score = run_vmaf_check(sample_ref, sample_enc)
                    enc_size = sample_enc.stat().st_size
                    savings = (1 - (enc_size / sample_ref.stat().st_size)) * 100

                    if vmaf_score >= TARGET_VMAF_MIN and savings >= MIN_SAVINGS_PERCENT:
                        update(f"HEVC Accepted (VMAF {vmaf_score:.1f})", "green")
                        chosen_codec = "hevc"
                        chosen_qp = DEFAULT_HEVC_QP
                    else:
                        update("HEVC Rejected", "red")
                        common.log_event(log_dir, "rejected.jsonl", {"file": str(filepath), "status": "rejected", "vmaf": vmaf_score})
                else:
                    update("Skipping HEVC", "yellow")
        finally:
            # Cleanup Samples (previously skipped when a test phase raised).
            if sample_ref.exists(): sample_ref.unlink()
            if sample_enc.exists(): sample_enc.unlink()

        # 4. Full Encode
        if chosen_codec:
            update(f"Encoding {chosen_codec.upper()} (QP {chosen_qp})", "green")
            output_file = TEMP_DIR / f"{filepath.stem}.{chosen_codec}.mkv"
            final_args = get_encoder_args(chosen_codec, av1_enc if chosen_codec=="av1" else hevc_enc, chosen_qp)

            cmd_full = [
                FFMPEG_BIN, "-y", "-hide_banner", "-loglevel", "info", "-stats",
                "-i", str(filepath),
                *final_args,
                "-c:a", "copy", "-c:s", "copy", "-map", "0",
                str(output_file)
            ]

            def prog_cb(msg):
                # Parse ffmpeg -stats lines into a percent/speed status.
                if "frame=" in msg:
                    try:
                        if "time=" in msg:
                            t_str = msg.split("time=")[1].split(" ")[0]
                            h, m, s = map(float, t_str.split(':'))
                            cur_sec = h*3600 + m*60 + s
                            percent = (cur_sec / info["duration"]) * 100
                        else:
                            percent = 0.0

                        speed = "1x"
                        if "speed=" in msg:
                            speed = msg.split("speed=")[1].split("x")[0] + "x"

                        update(f"Encoding {chosen_codec} | {percent:.1f}% | {speed}", "green")
                    except Exception:
                        # Malformed stats line; skip this status tick.
                        pass

            if run_process(cmd_full, f"Full Encode ({chosen_codec.upper()} QP {chosen_qp})", status_callback=prog_cb):
                final_info = common.get_video_info(output_file)
                if not final_info: final_info = {"size": output_file.stat().st_size}

                final_size = final_info["size"]
                final_savings = (1 - (final_size / info["size"])) * 100
                saved_bytes = info["size"] - final_size

                update(f"Uploading (Saved {final_savings:.1f}%)", "blue")

                # UPLOAD (serialized so only one worker hits shared storage)
                with upload_lock:
                    update(f"Uploading...", "blue")
                    backup_path = filepath.with_suffix(f"{filepath.suffix}.original")
                    try:
                        shutil.move(str(filepath), str(backup_path))
                        shutil.copy2(str(output_file), str(filepath))

                        # Verify integrity before deleting the backup.
                        if Path(filepath).stat().st_size == final_size:
                            output_file.unlink()
                            backup_path.unlink()

                            # Refresh metadata for accuracy
                            final_info_verified = common.get_video_info(filepath)

                            common.log_event(log_dir, f"{log_category}.jsonl", {
                                "file": str(filepath),
                                "status": "success",
                                "codec": chosen_codec,
                                "vmaf": vmaf_score,
                                "savings": final_savings,
                                "original_metadata": info,
                                "encoded_metadata": final_info_verified or final_info
                            })
                            update("Done", "green")
                            if status_cb: status_cb(worker_id, filename, f"STATS:SAVED:{saved_bytes}", "green")
                        else:
                            update("Upload Failed (Size Mismatch)", "red")
                            shutil.move(str(backup_path), str(filepath))
                    except Exception as e:
                        update(f"Move Error: {str(e)[:20]}", "red")
                        if backup_path.exists(): shutil.move(str(backup_path), str(filepath))
            else:
                update("Encode Failed", "red")
                if output_file.exists(): output_file.unlink()

    except Exception as e:
        update(f"Error: {str(e)[:30]}", "red")
    finally:
        # Always release the shared lock and mark the slot idle.
        if lock_file.exists(): lock_file.unlink()
        update("Idle", "dim")
|
||||
|
||||
def main():
    """CLI entry: detect hardware, scan libraries, dispatch GPU workers."""
    global debug_mode, TEMP_DIR

    parser = argparse.ArgumentParser()
    parser.add_argument("--tv-dir", default=common.DEFAULT_CONFIG["tv_dir"])
    parser.add_argument("--content-dir", default=common.DEFAULT_CONFIG["content_dir"])
    parser.add_argument("--jobs", type=int, default=MAX_JOBS)
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--skip-until", help="Skip all files alphabetically until this filename substring is found")
    parser.add_argument("--cpu-only", action="store_true", help="Force software encoding (CPU only)")
    parser.add_argument("--temp-dir", help="Override local temp directory")
    args = parser.parse_args()

    if args.debug:
        debug_mode = True
        print("[Debug Mode Enabled]")

    # 0-1. Dependencies and working directories.
    common.check_dependencies()
    lock_dir, log_dir = common.get_base_paths(args)
    TEMP_DIR = common.get_temp_dir(args)

    # 2. Hardware probe (honours --cpu-only via common).
    av1, hevc, hw = common.detect_hardware_encoder(args)

    rule = "=" * 60
    print(rule)
    print(f" SMART ENCODER | Hardware: {hw.upper()} | Jobs: {args.jobs}")

    if hw == "cpu":
        print(f" [!] Fallback to CPU Software Encoding (Slow)")
        if av1: print(f" AV1: {av1} (libsvtav1)")
        if hevc: print(f" HEVC: {hevc} (libx265)")
    else:
        print(f" AV1: {av1} | HEVC: {hevc}")

    print(f" Locks: {lock_dir}")
    print(rule)

    # 3. Scan & Queue — biggest files first within each library.
    tasks = []
    for root_str, category, label in (
        (args.tv_dir, "tv_shows", "TV"),
        (args.content_dir, "content", "Content"),
    ):
        root = Path(root_str)
        if not root.exists():
            continue
        print(f"Scanning {label}: {root}")
        media = list(root.rglob("*.mkv")) + list(root.rglob("*.mp4"))
        media.sort(key=lambda p: p.stat().st_size, reverse=True)
        tasks.extend((m, category) for m in media)

    if not tasks:
        print("No files found.")
        return

    # 4. Execute
    print(f"\n🚀 Processing {len(tasks)} files...")

    with ThreadPoolExecutor(max_workers=args.jobs) as executor:
        pending = {
            executor.submit(process_file, f, cat, lock_dir, log_dir, (av1, hevc, hw)): f
            for f, cat in tasks
        }

        for fut in as_completed(pending):
            if shutdown_requested:
                executor.shutdown(wait=False, cancel_futures=True)
                break
            try:
                fut.result()
            except Exception as e:
                print(f"Worker Error: {e}")
|
||||
357
src/smart_monitor.py
Normal file
357
src/smart_monitor.py
Normal file
@@ -0,0 +1,357 @@
|
||||
#!/usr/bin/env python3
|
||||
r"""
|
||||
Smart VMAF Monitor & Dashboard
|
||||
------------------------------
|
||||
The main interface for the VMAF Optimizer.
|
||||
Provides a TUI (Text User Interface) to visualize encoding progress.
|
||||
|
||||
Usage:
|
||||
python smart_monitor.py --tv-dir "Z:\tv" --content-dir "Z:\content" --jobs 4 [--monitor]
|
||||
"""
|
||||
|
||||
import os
|
||||
import time
|
||||
import argparse
|
||||
import sys
|
||||
import threading
|
||||
import queue
|
||||
import json
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
# Import the engine
|
||||
import smart_gpu_encoder as encoder
|
||||
import vmaf_common as common
|
||||
|
||||
# UI Library
|
||||
try:
|
||||
from rich.console import Console
|
||||
from rich.live import Live
|
||||
from rich.table import Table
|
||||
from rich.layout import Layout
|
||||
from rich.panel import Panel
|
||||
from rich.text import Text
|
||||
HAS_RICH = True
|
||||
except ImportError:
|
||||
HAS_RICH = False
|
||||
print("Warning: 'rich' library not found. Running in basic mode.")
|
||||
|
||||
# Watchdog
|
||||
try:
|
||||
from watchdog.observers import Observer
|
||||
from watchdog.events import FileSystemEventHandler
|
||||
HAS_WATCHDOG = True
|
||||
except ImportError:
|
||||
HAS_WATCHDOG = False
|
||||
|
||||
# --- Caching ---
|
||||
CACHE_FILE = Path("library_cache.json")
|
||||
|
||||
def load_cache():
    """Load previously scanned library paths from the JSON cache file.

    Returns:
        set[str]: cached path strings, or an empty set when the cache is
        missing, unreadable, or corrupt (treated as a cold start rather
        than an error — the cache is purely an optimization).
    """
    if CACHE_FILE.exists():
        try:
            with open(CACHE_FILE, "r") as f:
                return set(json.load(f))
        except (OSError, ValueError, TypeError):
            # OSError: unreadable file; ValueError covers JSONDecodeError;
            # TypeError: cached payload was not an iterable of strings.
            pass
    return set()
|
||||
|
||||
def save_cache(files):
    """Persist the scanned library file list to the JSON cache.

    Best-effort: a failed write only costs a rescan next run, so I/O
    errors are deliberately swallowed instead of interrupting the scan.
    """
    try:
        with open(CACHE_FILE, "w") as f:
            # Convert paths to strings for JSON
            json.dump([str(f) for f in files], f)
    except OSError:
        pass
|
||||
|
||||
def fast_scan(path):
    """Recursively collect .mkv/.mp4 files under *path* via os.scandir.

    Skips work-in-progress artifacts (names containing "_enc" or "_ref").
    A missing root or an unreadable directory yields whatever was found
    so far instead of aborting the whole scan.

    Returns:
        list[str]: file paths (strings, as produced by os.scandir).
    """
    files = []
    try:
        if not os.path.exists(path):
            return []
        # Context manager closes the scandir iterator promptly
        # (the original leaked it until garbage collection).
        with os.scandir(path) as it:
            for entry in it:
                if entry.is_dir():
                    # Recurse; errors inside the subtree are handled there.
                    files.extend(fast_scan(entry.path))
                elif entry.is_file():
                    if entry.name.lower().endswith(('.mkv', '.mp4')):
                        # Case-sensitive marker check, matching the original.
                        if "_enc" not in entry.name and "_ref" not in entry.name:
                            files.append(entry.path)
    except OSError:
        # Permission or I/O error on this directory: skip it.
        pass
    return files
|
||||
|
||||
# --- UI State ---
|
||||
class Dashboard:
    """Thread-safe TUI state: per-worker rows, aggregate stats, activity log.

    All mutating methods take ``self.lock`` so encoder worker threads can
    report concurrently while the UI thread renders.
    """

    def __init__(self, num_workers):
        self.num_workers = num_workers
        # One display row per worker index.
        self.worker_status = {
            i: {"file": "Idle", "action": "Waiting", "progress": 0, "speed": "", "color": "dim"}
            for i in range(num_workers)
        }
        # Aggregate counters; savings_gb is a float accumulator.
        self.stats = {"processed": 0, "skipped": 0, "failed": 0, "rejected": 0, "savings_gb": 0.0}
        # Most-recent-first activity log (capped at 12 entries).
        self.recent_completed = []
        self.lock = threading.Lock()

    def format_filename(self, filename):
        """Compact a Sonarr-style name to "Series S01E01 [Quality]".

        Falls back to middle-ellipsizing long plain names; any parsing
        failure returns the name unchanged (display must never crash).
        """
        try:
            # Match "{Series} - S01E01 - {Title} {Quality}.mkv"
            match = re.search(r"(.*?) - (S\d+E\d+) - .*? ((?:Bluray|WebDL|Remux|2160p|1080p|Proper).*)\.mkv", filename, re.IGNORECASE)
            if match:
                series = match.group(1)[:15]  # Truncate series name
                s_e = match.group(2)
                quality = match.group(3).split()[0]  # First token, e.g. "Bluray-2160p"
                return f"{series} {s_e} [{quality}]"

            # Fallback for simpler names: keep head and tail.
            if len(filename) > 35:
                return filename[:15] + "..." + filename[-15:]
            return filename
        except Exception:
            return filename

    def update_worker(self, worker_id, file, action, color="blue"):
        """Update one worker row.

        *action* may carry rich status in the form
        "Encoding AV1 | 45.2% | 2.3x"; percent and speed fields are
        parsed out, the remainder becomes the action text.
        """
        with self.lock:
            display_name = self.format_filename(file)

            progress = 0
            speed = ""

            if "|" in action:
                parts = action.split("|")
                action_text = parts[0].strip()
                for p in parts[1:]:
                    p = p.strip()
                    if "%" in p:
                        try:
                            progress = float(p.replace("%", ""))
                        except ValueError:
                            # Malformed percent field: keep progress at 0.
                            pass
                    elif "x" in p or "MB/s" in p:
                        speed = p
                action = action_text

            self.worker_status[worker_id] = {
                "file": display_name,
                "action": action,
                "progress": progress,
                "speed": speed,
                "color": color,
            }

    def add_log(self, message):
        """Prepend a timestamped entry to the activity log (keeps last 12)."""
        with self.lock:
            ts = time.strftime("%H:%M:%S")
            self.recent_completed.insert(0, f"[{ts}] {message}")
            if len(self.recent_completed) > 12:
                self.recent_completed.pop()

    def update_stats(self, key, val=1):
        """Add *val* to a stats counter under the lock.

        Works for both integer counters (val defaults to 1) and the
        float ``savings_gb`` accumulator — the original had an identical
        if/else for both cases, collapsed here.
        """
        with self.lock:
            self.stats[key] += val

    def get_renderable(self):
        """Build the rich Layout for the live display ("" without rich)."""
        if not HAS_RICH:
            return ""

        layout = Layout()
        layout.split_column(
            Layout(name="header", size=3),
            Layout(name="workers", size=self.num_workers + 4),
            Layout(name="stats", size=3),
            Layout(name="logs", ratio=1)
        )

        layout["header"].update(Panel(Text("Smart GPU Encoder (AMD AMF + VMAF)", justify="center", style="bold cyan")))

        # Workers Table
        table = Table(box=None, expand=True, padding=(0, 1))
        table.add_column("ID", width=3, style="dim", no_wrap=True)
        table.add_column("File", ratio=6, no_wrap=True)
        table.add_column("Action", ratio=3, no_wrap=True)
        table.add_column("Progress", width=20, no_wrap=True)
        table.add_column("Speed", width=12, no_wrap=True)

        for i in range(self.num_workers):
            ws = self.worker_status[i]

            pct = ws["progress"]
            # Hand-drawn progress bar (12 cells), shown only when active.
            if pct > 0:
                bar_len = 12
                filled = int(bar_len * (pct / 100))
                bar_str = "━" * filled + " " * (bar_len - filled)
                prog_render = Text(f"{bar_str} {pct:.1f}%", style="green")
            else:
                prog_render = Text("")

            table.add_row(
                str(i+1),
                Text(ws["file"], style="white"),
                Text(ws["action"], style=ws["color"]),
                prog_render,
                Text(ws["speed"], style="yellow")
            )

        layout["workers"].update(Panel(table, title="Active Workers"))

        stat_str = f"Processed: [green]{self.stats['processed']}[/] | Skipped: [yellow]{self.stats['skipped']}[/] | Rejected: [magenta]{self.stats['rejected']}[/] | Failed: [red]{self.stats['failed']}[/] | Saved: [bold green]{self.stats['savings_gb']:.2f} GB[/]"
        layout["stats"].update(Panel(Text.from_markup(stat_str, justify="center")))

        layout["logs"].update(Panel("\n".join(self.recent_completed), title="Activity Log"))

        return layout
|
||||
|
||||
# --- Worker Bridge ---
|
||||
def worker_wrapper(worker_id, file_path, category, lock_dir, log_dir, encoders, dashboard):
    """Run one encode job and translate its status messages into dashboard updates.

    Bridges ``encoder.process_file`` (which reports progress through a
    callback) and the ``Dashboard``. Statistics and log entries are derived
    by substring-matching the callback messages, so the keywords below must
    stay in sync with what smart_gpu_encoder.py emits.
    """
    def status_callback(w_id, fname, msg, color):
        # Handle Stats Signal
        # "STATS:SAVED:<bytes>" is an out-of-band accounting message,
        # not a display update — consume it and return early.
        if msg.startswith("STATS:"):
            parts = msg.split(":")
            if parts[1] == "SAVED":
                try:
                    bytes_saved = float(parts[2])
                    dashboard.update_stats("savings_gb", bytes_saved / (1024**3))
                    dashboard.update_stats("processed")
                except: pass
            return

        # Update Live Table
        dashboard.update_worker(worker_id, fname, msg, color)

        # Check for Completion Events to update History
        # Match keywords from smart_gpu_encoder.py
        if "Done" in msg:
            dashboard.add_log(f"[Success] {dashboard.format_filename(fname)}")
        elif "Skipped" in msg or "Skipping" in msg:
            dashboard.add_log(f"[Skip] {dashboard.format_filename(fname)}")
            dashboard.update_stats("skipped")
        elif "Rejected" in msg:
            dashboard.add_log(f"[Reject] {dashboard.format_filename(fname)}")
            dashboard.update_stats("rejected")
            # Rejected doesn't count as failed, just processed (or ignored stats)
        elif "Failed" in msg or "Error" in msg:
            dashboard.add_log(f"[Fail] {dashboard.format_filename(fname)}")
            dashboard.update_stats("failed")

    try:
        encoder.process_file(file_path, category, lock_dir, log_dir, encoders, worker_id, status_callback)
    except Exception as e:
        # A crash in the engine counts as a failed file; truncate the
        # message so the log column stays readable.
        dashboard.add_log(f"Error in worker {worker_id}: {str(e)[:30]}")
        dashboard.update_stats("failed")
    finally:
        # Always reset the row so a dead worker doesn't look stuck.
        dashboard.update_worker(worker_id, "Idle", "Waiting", "dim")
|
||||
|
||||
# --- Monitor ---
|
||||
class WatcherHandler(FileSystemEventHandler):
    """Watchdog handler that feeds newly arrived video files into a work queue.

    Both "created" and "moved" events are relevant because download
    clients typically move a finished file into the library folder.
    """

    def __init__(self, queue):
        self.queue = queue

    def _enqueue_if_video(self, event, path):
        # Ignore directory events and non-video extensions.
        if event.is_directory:
            return
        if path.lower().endswith(('.mkv', '.mp4')):
            self.queue.put(Path(path))

    def on_created(self, event):
        self._enqueue_if_video(event, event.src_path)

    def on_moved(self, event):
        self._enqueue_if_video(event, event.dest_path)
|
||||
|
||||
# --- Main ---
|
||||
def main():
    """Entry point: parse CLI args, spawn scanner + worker threads, run the TUI.

    Modes:
      batch (default): drain the queue once the scan completes, then exit.
      --monitor: keep running and rely on queued events indefinitely.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--tv-dir", default=common.DEFAULT_CONFIG["tv_dir"])
    parser.add_argument("--content-dir", default=common.DEFAULT_CONFIG["content_dir"])
    parser.add_argument("--jobs", type=int, default=4)
    parser.add_argument("--monitor", action="store_true")
    parser.add_argument("--cpu-only", action="store_true", help="Force software encoding")
    parser.add_argument("--temp-dir", help="Override local temp directory")
    args = parser.parse_args()

    # Setup
    lock_dir, log_dir = common.get_base_paths(args)
    # Inject the temp dir into the engine module's global config.
    encoder.TEMP_DIR = common.get_temp_dir(args)

    # Detect HW
    # (av1_encoder, hevc_encoder, hw_type) tuple from vmaf_common.
    encoders = common.detect_hardware_encoder(args)

    # UI
    dashboard = Dashboard(args.jobs)
    dashboard.add_log(f"Logs: {log_dir}")

    # Work Queue
    work_queue = queue.Queue()

    # Background Scanner
    def background_scanner():
        # Fills the queue: cached paths first (instant start), then a
        # fresh scan that enqueues only files the cache didn't know.
        time.sleep(2) # Let UI start
        dashboard.add_log("Starting background scan...")

        # Load Cache first
        cached_files = load_cache()
        if cached_files:
            dashboard.add_log(f"Loaded {len(cached_files)} files from cache.")
            for f in cached_files:
                p = Path(f)
                # Category inferred by path prefix match against --tv-dir.
                cat = "tv_shows" if str(args.tv_dir) in str(p) else "content"
                work_queue.put((p, cat))

        # Real Scan
        all_files = []
        for d, cat in [(args.tv_dir, "tv_shows"), (args.content_dir, "content")]:
            found = fast_scan(d)
            for f in found:
                all_files.append(f)
                # Only add if NOT in cache
                if str(f) not in cached_files:
                    work_queue.put((Path(f), cat))

        dashboard.add_log(f"Scan complete. Total: {len(all_files)}")
        save_cache(all_files)

    # Start Scanner Thread
    scan_thread = threading.Thread(target=background_scanner, daemon=True)
    scan_thread.start()

    # Thread Pool for Workers
    threads = []

    def worker_loop(w_id):
        # NOTE(review): relies on a module-level `shutdown_requested` flag
        # in smart_gpu_encoder — confirm that module exposes this name.
        while not encoder.shutdown_requested:
            try:
                item = work_queue.get(timeout=1)
                file_path, category = item
                worker_wrapper(w_id, file_path, category, lock_dir, log_dir, encoders, dashboard)
                work_queue.task_done()
            except queue.Empty:
                # If batch mode and scan is done and queue is empty, exit
                if not args.monitor and not scan_thread.is_alive() and work_queue.empty():
                    time.sleep(2) # Grace period
                    if work_queue.empty():
                        return # Exit thread
                continue
            except Exception as e:
                dashboard.add_log(f"Worker {w_id} crashed: {e}")

    for i in range(args.jobs):
        t = threading.Thread(target=worker_loop, args=(i,), daemon=True)
        t.start()
        threads.append(t)

    # UI Loop
    try:
        if HAS_RICH:
            with Live(dashboard.get_renderable(), refresh_per_second=4) as live:
                while not encoder.shutdown_requested:
                    live.update(dashboard.get_renderable())
                    time.sleep(0.25)

                    # Exit condition
                    # Batch mode: stop once scan finished, queue drained,
                    # and every worker thread has returned.
                    if not args.monitor and not scan_thread.is_alive() and work_queue.unfinished_tasks == 0:
                        # Check if threads are actually dead
                        if all(not t.is_alive() for t in threads):
                            break
        else:
            # Plain-console fallback when 'rich' is unavailable.
            while not encoder.shutdown_requested:
                time.sleep(1)
                if not args.monitor and not scan_thread.is_alive() and work_queue.empty(): break

    except KeyboardInterrupt:
        # Signal the engine's workers to stop, then let daemon threads die.
        encoder.shutdown_requested = True
        print("\nStopping...")

    print("\nDone.")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
47
src/test_monitor_logic.py
Normal file
47
src/test_monitor_logic.py
Normal file
@@ -0,0 +1,47 @@
|
||||
from smart_monitor import Dashboard
|
||||
import time
|
||||
|
||||
def test_tui_parsing():
    """Smoke-test Dashboard status parsing and stats counters.

    Mirrors the keyword matching done in smart_monitor.worker_wrapper;
    runnable directly as a script (prints a transcript) or under pytest.
    """
    print("Testing Dashboard Logic...")
    db = Dashboard(1)

    # Test 1: Encoding Progress
    # "Action | percent | speed" should be split into separate fields.
    msg = "Encoding av1 | 45.2% | 2.3x"
    db.update_worker(0, "Test File 1.mkv", msg)
    status = db.worker_status[0]
    print(f"Test 1 (Parsing): Progress={status['progress']} (Expected 45.2), Speed={status['speed']} (Expected 2.3x)")
    assert status['progress'] == 45.2
    assert status['speed'] == "2.3x"

    # Test 2: Stats - Skipped
    print("\nTest 2: Stats - Skipped")
    # Simulate worker wrapper logic
    msg = "Already AV1 (Skipping)"
    if "Skipping" in msg:
        db.update_stats("skipped")

    print(f"Skipped Count: {db.stats['skipped']} (Expected 1)")
    assert db.stats['skipped'] == 1

    # Test 3: Stats - Rejected
    print("\nTest 3: Stats - Rejected")
    msg = "HEVC Rejected"
    if "Rejected" in msg:
        db.update_stats("rejected")

    print(f"Rejected Count: {db.stats['rejected']} (Expected 1)")
    assert db.stats['rejected'] == 1

    # Test 4: Stats - Failed
    print("\nTest 4: Stats - Failed")
    msg = "Error: FFMPEG failed"
    if "Error" in msg:
        db.update_stats("failed")

    print(f"Failed Count: {db.stats['failed']} (Expected 1)")
    assert db.stats['failed'] == 1

    print("\nAll Tests Passed!")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_tui_parsing()
|
||||
295
src/vmaf_common.py
Normal file
295
src/vmaf_common.py
Normal file
@@ -0,0 +1,295 @@
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import json
|
||||
import hashlib
|
||||
import platform
|
||||
import subprocess
|
||||
import time
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
|
||||
# --- Defaults ---
# Central fallback configuration. The entry-point scripts read these as
# argparse defaults, so CLI flags override every value at runtime.
DEFAULT_CONFIG = {
    "tv_dir": r"Z:\tv",            # Network share scanned for TV episodes
    "content_dir": r"Z:\content",  # Network share scanned for other content
    "target_vmaf": 93.0,           # Minimum acceptable VMAF quality score
    "min_savings": 12.0,           # presumably minimum size savings (percent) to keep an encode — verify against encoder logic
    "gpu_jobs": 2,                 # Parallel workers when hardware encoding
    "cpu_jobs": 1,                 # Parallel workers for software encoding
    "av1_crf": 34,                 # Default AV1 CRF (lower = higher quality)
    "hevc_crf": 28,                # Default HEVC CRF (lower = higher quality)
    # NOTE(review): machine-specific absolute path — consider moving to a
    # user config file or environment variable.
    "temp_dir_windows": r"C:\Users\bnair\Videos\encodes",
    "temp_dir_linux": "~/Videos/encodes"  # '~' expanded in get_temp_dir()
}
|
||||
|
||||
# --- Paths ---
|
||||
def get_base_paths(args=None):
    r"""Determine root paths for locks and logs.

    Priority:
      1. Shared network drive (parent of tv_dir) -> e.g. Z:\.vmaf_locks,
         so multiple machines see each other's locks.
      2. Local fallback: ./locks and ./logs in the working directory.

    The docstring is raw because the Windows examples contain
    backslashes ("\." is an invalid escape in a normal string and
    triggers a SyntaxWarning on modern Python).

    Args:
        args: optional argparse namespace; only ``tv_dir`` is consulted.

    Returns:
        (lock_dir, log_dir) as Path objects; both directories exist on return.
    """
    tv_dir = Path(args.tv_dir) if args and hasattr(args, 'tv_dir') else Path(DEFAULT_CONFIG["tv_dir"])

    # Locks MUST live at the common root of the content to be shared
    # between machines; we assume tv_dir is like "Z:\tv" -> root "Z:\".
    shared_root = tv_dir.parent

    # Local fallbacks (logs stay local to avoid network I/O while logging).
    lock_dir = Path("locks").resolve()
    log_dir = Path("logs").resolve()

    if shared_root.exists():
        network_lock = shared_root / ".vmaf_locks"
        try:
            network_lock.mkdir(parents=True, exist_ok=True)
            lock_dir = network_lock
        except Exception as e:
            print(f"[Warning] Could not create network locks at {network_lock}: {e}")
            print(f"    Falling back to local locks: {lock_dir}")

    # Ensure both directories exist before returning.
    lock_dir.mkdir(parents=True, exist_ok=True)
    log_dir.mkdir(parents=True, exist_ok=True)

    return lock_dir, log_dir
|
||||
|
||||
def get_temp_dir(args=None):
    """Resolve the scratch directory used for encode intermediates.

    Order of precedence: explicit ``--temp-dir`` override, then the
    per-platform default from DEFAULT_CONFIG. The directory is created
    if needed and returned as a Path.
    """
    override = getattr(args, 'temp_dir', None) if args else None
    if override:
        target = Path(override)
    elif platform.system() == "Windows":
        target = Path(DEFAULT_CONFIG["temp_dir_windows"])
    else:
        # '~' in the Linux default is expanded here.
        target = Path(DEFAULT_CONFIG["temp_dir_linux"]).expanduser()

    target.mkdir(parents=True, exist_ok=True)
    return target
|
||||
|
||||
# --- Logging ---
|
||||
def log_event(log_dir, filename, data):
    """Append *data* as one JSON line to ``log_dir/filename`` (JSONL format).

    A "timestamp" key (local-time ISO-8601) is added to *data* in place.
    Logging is best-effort: failures are reported to stdout, never raised.

    Args:
        log_dir: directory for log files (str or Path).
        filename: log file name within log_dir.
        data: JSON-serializable dict describing the event (mutated).
    """
    log_path = Path(log_dir) / filename
    data["timestamp"] = datetime.now().isoformat()
    try:
        with open(log_path, "a", encoding="utf-8") as f:
            f.write(json.dumps(data) + "\n")
    except (OSError, TypeError, ValueError) as e:
        # OSError: unwritable path; TypeError/ValueError: data not
        # JSON-serializable. Anything else should surface as a bug.
        print(f"[ERROR] Failed to write log: {e}")
|
||||
|
||||
# --- Dependencies ---
|
||||
def check_dependencies(required_tools=None):
    """Verify required external tools are available; exit(1) if any are missing.

    ``ab-av1`` may be satisfied either by the bundled ``bin/ab-av1.exe``
    next to the project root or by an executable on PATH; everything
    else must be on PATH.
    """
    tools = ["ffmpeg", "ffprobe", "ab-av1"] if required_tools is None else required_tools

    # Bundled binaries live in <project_root>/bin, relative to this file.
    bin_path = Path(__file__).parent.parent / "bin"

    missing = []
    for tool in tools:
        if tool == "ab-av1":
            bundled = bin_path / "ab-av1.exe"
            if not bundled.exists() and not shutil.which("ab-av1"):
                missing.append("ab-av1")
        elif not shutil.which(tool):
            missing.append(tool)

    if missing:
        print(f"[!] CRITICAL: Missing tools: {', '.join(missing)}")
        print(f"    Checked bundled path: {bin_path}")
        sys.exit(1)
|
||||
|
||||
# --- Metadata ---
|
||||
def get_video_info(filepath):
    """Probe *filepath* with ffprobe and return standardized metadata.

    Returns:
        dict with keys codec, width, height, duration, size, bitrate, fps —
        or None on any failure (ffprobe missing, unreadable file, no video
        stream). The broad except is deliberate: callers treat None as
        "skip this file".
    """
    try:
        cmd = [
            "ffprobe", "-v", "quiet",
            "-print_format", "json",
            "-show_format", "-show_streams",
            str(filepath)
        ]
        result = subprocess.run(cmd, capture_output=True, text=True, check=True)
        data = json.loads(result.stdout)

        video = next((s for s in data["streams"] if s["codec_type"] == "video"), None)
        if not video:
            return None

        size = int(data["format"].get("size", 0))
        duration = float(data["format"].get("duration", 0))

        # BUGFIX: ffprobe reports the container bitrate under "bit_rate",
        # not "bitrate" — the old key never matched, so the reported value
        # was always discarded in favor of the size/duration estimate.
        bitrate = int(data["format"].get("bit_rate", 0))
        if bitrate == 0 and duration > 0:
            # Fall back to deriving it when the container omits the field.
            bitrate = int((size * 8) / duration)

        return {
            "codec": video.get("codec_name"),
            "width": int(video.get("width", 0)),
            "height": int(video.get("height", 0)),
            "duration": duration,
            "size": size,
            "bitrate": bitrate,
            "fps": video.get("r_frame_rate", "0/0")
        }
    except Exception:
        return None
|
||||
|
||||
# --- Locks ---
|
||||
def acquire_lock(lock_dir, filepath):
    """Simple cross-machine file-based lock keyed on the video's file name.

    Args:
        lock_dir: shared directory holding .lock files (Path).
        filepath: the media file to lock (Path; only its name is used).

    Returns:
        The lock file Path if acquired, None if the file is already locked
        or the lock directory is unwritable. Locks older than 24h are
        treated as stale (crashed worker) and reclaimed.
    """
    # Hash the name to avoid long paths / invalid chars in lock file names.
    fhash = hashlib.md5(str(filepath.name).encode()).hexdigest()
    lock_file = lock_dir / f"{fhash}.lock"

    if lock_file.exists():
        try:
            # Staleness check (24h)
            if time.time() - lock_file.stat().st_mtime > 86400:
                lock_file.unlink()
            else:
                return None
        except OSError:
            # stat/unlink raced with another process: treat lock as held.
            return None

    try:
        lock_file.touch()
        return lock_file
    except OSError:
        return None
|
||||
|
||||
# --- Hardware Detection ---
|
||||
def detect_hardware_encoder(args=None):
    """Detect available AV1/HEVC encoders via ``ffmpeg -encoders``.

    Hardware vendors are probed in a fixed preference order (NVENC, AMF,
    VAAPI, QSV, VideoToolbox); the first vendor offering either codec
    wins. Falls back to software (libsvtav1/libx265), then to nothing.

    Args:
        args: optional namespace; ``cpu_only`` forces software encoding.

    Returns:
        (av1_encoder, hevc_encoder, hw_type) where hw_type is one of
        "nvenc"/"amf"/"vaapi"/"qsv"/"videotoolbox"/"cpu"/"none"/"error"
        and either encoder may be None.
    """
    def _encoder_list():
        # One probe, shared by both paths below.
        res = subprocess.run(["ffmpeg", "-hide_banner", "-encoders"], capture_output=True, text=True)
        return res.stdout

    # Forced CPU mode: skip hardware checks but still pick software encoders.
    if args and getattr(args, 'cpu_only', False):
        try:
            out = _encoder_list()
            if "libsvtav1" in out:
                return "libsvtav1", "libx265", "cpu"
            if "libx265" in out:
                return None, "libx265", "cpu"
            return None, None, "cpu"
        except Exception:
            return None, None, "cpu"

    try:
        out = _encoder_list()

        # Vendor preference order; identical to the original's cascade of
        # copy-pasted blocks, collapsed into data.
        vendors = [
            ("nvenc", "av1_nvenc", "hevc_nvenc"),                    # NVIDIA (Win/Linux)
            ("amf", "av1_amf", "hevc_amf"),                          # AMD (Windows)
            ("vaapi", "av1_vaapi", "hevc_vaapi"),                    # AMD/Intel (Linux)
            ("qsv", "av1_qsv", "hevc_qsv"),                          # Intel QuickSync
            ("videotoolbox", "av1_videotoolbox", "hevc_videotoolbox")  # Apple Silicon
        ]
        for hw_type, av1_name, hevc_name in vendors:
            av1_enc = av1_name if av1_name in out else None
            hevc_enc = hevc_name if hevc_name in out else None
            if av1_enc or hevc_enc:
                return av1_enc, hevc_enc, hw_type

        # Software fallback when no hardware encoder is present.
        av1_enc = "libsvtav1" if "libsvtav1" in out else None
        hevc_enc = "libx265" if "libx265" in out else None
        if av1_enc or hevc_enc:
            return av1_enc, hevc_enc, "cpu"

        return None, None, "none"

    except Exception as e:
        print(f"[Warning] HW Detection failed: {e}")
        return None, None, "error"
|
||||
|
||||
def get_encoder_args(codec, encoder, qp):
    """Build the ffmpeg video-codec argument list for a specific encoder.

    Args:
        codec: logical codec name ("av1"/"hevc"); currently informational.
        encoder: concrete ffmpeg encoder name, or None/empty for no-op.
        qp: quality value (CRF/QP scale depends on the encoder).

    Returns:
        list[str] of ffmpeg arguments; [] when the encoder is unknown or unset.
    """
    if not encoder:
        return []

    qp_s = str(qp)

    # --- Software (CPU) encoders ---
    if encoder == "libsvtav1":
        # SVT-AV1 CRF range 0-63, lower is better quality.
        return ["-c:v", "libsvtav1", "-crf", qp_s, "-preset", "6", "-g", "240"]
    if encoder == "libx265":
        return ["-c:v", "libx265", "-crf", qp_s, "-preset", "medium"]

    # --- Hardware encoders, matched by vendor substring ---
    if "nvenc" in encoder:
        # p6 preset = better quality; spatial AQ improves perceived quality.
        return ["-c:v", encoder, "-rc", "constqp", "-qp", qp_s, "-preset", "p6", "-spatial-aq", "1"]
    if "amf" in encoder:
        return ["-c:v", encoder, "-usage", "transcoding", "-rc", "cqp",
                "-qp_i", qp_s, "-qp_p", qp_s, "-qp_b", qp_s, "-quality", "quality"]
    if "qsv" in encoder:
        return ["-c:v", encoder, "-global_quality", qp_s, "-look_ahead", "1"]
    if "videotoolbox" in encoder:
        # VideoToolbox uses a 0-100 quality scale; roughly map QP onto it.
        return ["-c:v", encoder, "-q:v", str(int(100 - (qp * 2)))]
    if "vaapi" in encoder:
        # CQP via -qp is the safest bet across VAAPI drivers;
        # callers may also need "-vf format=nv12,hwupload".
        return ["-c:v", encoder, "-qp", qp_s]

    return []
|
||||
|
||||
def scan_directory(directory, extensions=None):
    """Walk *directory* and return candidate video files, largest first.

    Directories starting with "_" and system folders (.recycle, @eaDir)
    are pruned from the walk; already-encoded outputs (stems containing
    "_av1" or "_hevc") are excluded.
    """
    exts = {".mkv", ".mp4", ".mov", ".avi", ".ts"} if extensions is None else extensions

    found = []
    for dirpath, dirnames, filenames in os.walk(directory):
        # Prune in place so os.walk never descends into skipped folders.
        dirnames[:] = [
            d for d in dirnames
            if not d.startswith("_") and d not in [".recycle", "@eaDir"]
        ]

        for name in filenames:
            candidate = Path(dirpath) / name
            if candidate.suffix.lower() not in exts:
                continue
            stem = candidate.stem.lower()
            if "_av1" in stem or "_hevc" in stem:
                continue
            found.append(candidate)

    # Biggest files first: they offer the largest potential savings.
    return sorted(found, key=lambda p: p.stat().st_size, reverse=True)
|
||||
Reference in New Issue
Block a user