NY db struktur

This commit is contained in:
2026-04-19 23:45:59 +02:00
parent a9aa451d63
commit efc30cdbb2
6 changed files with 1056 additions and 1390 deletions

View File

@@ -28,29 +28,25 @@ class User(Base):
projects: Mapped[list["Project"]] = relationship("Project", back_populates="owner")
memberships: Mapped[list["ProjectMember"]] = relationship("ProjectMember", back_populates="user")
songs: Mapped[list["Song"]] = relationship("Song", back_populates="owner")
alt_ratings: Mapped[list["DanceAltRating"]] = relationship("DanceAltRating", back_populates="user")
playlist_shares: Mapped[list["PlaylistShare"]] = relationship("PlaylistShare", foreign_keys="PlaylistShare.shared_with_id", back_populates="shared_with")
# ── Song ──────────────────────────────────────────────────────────────────────
# ── Song (global — ikke knyttet til en bruger) ────────────────────────────────
class Song(Base):
__tablename__ = "songs"
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid)
owner_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False)
title: Mapped[str] = mapped_column(String(255), nullable=False)
artist: Mapped[str] = mapped_column(String(255), default="")
album: Mapped[str] = mapped_column(String(255), default="")
bpm: Mapped[int] = mapped_column(Integer, default=0)
duration_sec: Mapped[int] = mapped_column(Integer, default=0)
file_format: Mapped[str] = mapped_column(String(8), default="")
mbid: Mapped[str|None] = mapped_column(String(36), nullable=True)
mbid: Mapped[str|None] = mapped_column(String(36), nullable=True, unique=True)
acoustid: Mapped[str|None] = mapped_column(String(64), nullable=True)
synced_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
owner: Mapped["User"] = relationship("User", back_populates="songs")
project_songs: Mapped[list["ProjectSong"]] = relationship("ProjectSong", back_populates="song")
song_dances: Mapped[list["SongDance"]] = relationship("SongDance", back_populates="song", cascade="all, delete-orphan")
song_alt_dances: Mapped[list["SongAltDance"]] = relationship("SongAltDance", back_populates="song", cascade="all, delete-orphan")
@@ -68,7 +64,6 @@ class DanceLevel(Base):
class Dance(Base):
"""Dans-entitet: navn + niveau er unik kombination."""
__tablename__ = "dances"
__table_args__ = (UniqueConstraint("name", "level_id", name="uq_dance_name_level"),)
@@ -80,7 +75,6 @@ class Dance(Base):
stepsheet_url: Mapped[str] = mapped_column(String(512), default="")
notes: Mapped[str] = mapped_column(Text, default="")
use_count: Mapped[int] = mapped_column(Integer, default=1)
source: Mapped[str] = mapped_column(String(16), default="local")
synced_at: Mapped[datetime|None] = mapped_column(DateTime, nullable=True)
level: Mapped["DanceLevel|None"] = relationship("DanceLevel")
@@ -95,7 +89,7 @@ class Project(Base):
owner_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False)
name: Mapped[str] = mapped_column(String(128), nullable=False)
description: Mapped[str] = mapped_column(Text, default="")
visibility: Mapped[str] = mapped_column(String(16), default="private") # private|shared|public
visibility: Mapped[str] = mapped_column(String(16), default="private")
updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, onupdate=now_utc)
created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
@@ -111,8 +105,8 @@ class ProjectMember(Base):
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid)
project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False)
user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False)
role: Mapped[str] = mapped_column(String(16), default="viewer") # owner|editor|viewer
status: Mapped[str] = mapped_column(String(16), default="pending") # pending|accepted
role: Mapped[str] = mapped_column(String(16), default="viewer")
status: Mapped[str] = mapped_column(String(16), default="pending")
invited_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
project: Mapped["Project"] = relationship("Project", back_populates="members")
@@ -135,15 +129,14 @@ class ProjectSong(Base):
class PlaylistShare(Base):
"""Deling af en playlist med specifikke brugere."""
__tablename__ = "playlist_shares"
__table_args__ = (UniqueConstraint("project_id", "shared_with_id", name="uq_share"),)
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid)
project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False)
shared_with_id: Mapped[str|None] = mapped_column(String(36), ForeignKey("users.id"), nullable=True)
invited_email: Mapped[str] = mapped_column(String(255), default="") # til ikke-registrerede
permission: Mapped[str] = mapped_column(String(16), default="view") # view|copy|edit
invited_email: Mapped[str] = mapped_column(String(255), default="")
permission: Mapped[str] = mapped_column(String(16), default="view")
accepted_at: Mapped[datetime|None] = mapped_column(DateTime, nullable=True)
created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
@@ -154,7 +147,6 @@ class PlaylistShare(Base):
# ── Sang-dans tags ────────────────────────────────────────────────────────────
class SongDance(Base):
"""Dans-tags på en sang (brugerens egne tags)."""
__tablename__ = "song_dances"
__table_args__ = (UniqueConstraint("song_id", "dance_id", name="uq_song_dance"),)
@@ -168,7 +160,6 @@ class SongDance(Base):
class SongAltDance(Base):
"""Alternativ-dans tags på en sang."""
__tablename__ = "song_alt_dances"
__table_args__ = (UniqueConstraint("song_id", "dance_id", name="uq_song_alt_dance"),)
@@ -184,7 +175,6 @@ class SongAltDance(Base):
# ── Community dans-tags ───────────────────────────────────────────────────────
class CommunityDance(Base):
"""Fællesskabets dans-tags på sange."""
__tablename__ = "community_dances"
__table_args__ = (UniqueConstraint("song_mbid", "song_title", "song_artist", "dance_id", name="uq_comm_dance"),)
@@ -200,7 +190,6 @@ class CommunityDance(Base):
class CommunityDanceAlt(Base):
"""Fællesskabets alternativ-danse til en sang med ratings."""
__tablename__ = "community_dance_alts"
__table_args__ = (UniqueConstraint("song_mbid", "song_title", "song_artist", "alt_dance_id", name="uq_comm_alt"),)
@@ -220,14 +209,13 @@ class CommunityDanceAlt(Base):
class DanceAltRating(Base):
"""1-5 stjerne rating af en alternativ-dans."""
__tablename__ = "dance_alt_ratings"
__table_args__ = (UniqueConstraint("alternative_id", "user_id", name="uq_rating"),)
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid)
alternative_id: Mapped[str] = mapped_column(String(36), ForeignKey("community_dance_alts.id"), nullable=False)
user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False)
score: Mapped[int] = mapped_column(Integer, nullable=False) # 1-5
score: Mapped[int] = mapped_column(Integer, nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
alternative: Mapped["CommunityDanceAlt"] = relationship("CommunityDanceAlt", back_populates="ratings")

View File

@@ -4,20 +4,22 @@ sync.py — Push/pull synkronisering mellem lokal app og server.
POST /sync/push — send lokal data op til server
GET /sync/pull — hent server-data ned til app
"""
import uuid
import logging
from datetime import datetime, timezone
from fastapi import APIRouter, Depends, HTTPException
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from pydantic import BaseModel
from typing import Optional
from app.core.database import get_db
from app.core.security import get_current_user
from app.models import (
User, Song, Dance, DanceLevel, Project, ProjectSong,
PlaylistShare, CommunityDance, CommunityDanceAlt,
PlaylistShare, CommunityDance, SongDance, SongAltDance,
)
router = APIRouter(prefix="/sync", tags=["sync"])
logger = logging.getLogger(__name__)
# ── Schemas ───────────────────────────────────────────────────────────────────
@@ -29,7 +31,6 @@ class SongData(BaseModel):
album: str = ""
bpm: int = 0
duration_sec: int = 0
file_format: str = ""
mbid: str = ""
acoustid: str = ""
@@ -76,7 +77,60 @@ class PushPayload(BaseModel):
song_dances: list[SongDanceData] = []
song_alts: list[SongAltDanceData] = []
playlists: list[PlaylistData] = []
deleted_playlists: list[str] = [] # server-IDs (api_project_id) på slettede playlister
deleted_playlists: list[str] = [] # server-IDs (Project.id)
# ── Hjælpefunktion: find eller opret sang globalt ─────────────────────────────
def _find_or_create_song(db: Session, title: str, artist: str = "",
                         mbid: str = "", acoustid: str = "",
                         album: str = "", bpm: int = 0,
                         duration_sec: int = 0) -> Song:
    """Look up a song in the global catalogue, creating it if absent.

    Match hierarchy (strongest identifier first):
      1. MusicBrainz ID (mbid) — most reliable
      2. AcoustID fingerprint
      3. Exact title + artist
      4. No match — insert a new row

    Returns the matched or newly created ``Song`` (flushed, so ``id`` is set).
    """
    # 1) MBID — the safest possible match.
    if mbid:
        match = db.query(Song).filter_by(mbid=mbid).first()
        if match is not None:
            return match
    # 2) AcoustID fingerprint.
    if acoustid:
        match = db.query(Song).filter_by(acoustid=acoustid).first()
        if match is not None:
            # Backfill the MBID when the stored row lacks one.
            if mbid and not match.mbid:
                match.mbid = mbid
            return match
    # 3) Exact title + artist.
    if title:
        match = (
            db.query(Song)
            .filter(Song.title == title, Song.artist == artist)
            .first()
        )
        if match is not None:
            # Enrich the stored row with any stronger data we received.
            if mbid and not match.mbid:
                match.mbid = mbid
            if acoustid and not match.acoustid:
                match.acoustid = acoustid
            if bpm and not match.bpm:
                match.bpm = bpm
            return match
    # 4) Nothing matched — create a brand new global song.
    created = Song(
        title=title,
        artist=artist,
        album=album,
        bpm=bpm,
        duration_sec=duration_sec,
        mbid=mbid or None,
        acoustid=acoustid or None,
    )
    db.add(created)
    db.flush()  # assign created.id before returning
    return created
# ── Push ──────────────────────────────────────────────────────────────────────
@@ -88,78 +142,50 @@ def push(
me: User = Depends(get_current_user),
):
"""Upload lokal data til server. Returnerer server-IDs."""
import sqlalchemy as _sa
song_id_map = {} # local_id → server Song.id
dance_id_map = {} # "name|level" → server Dance.id
level_map = {} # level_name → DanceLevel.id
dance_id_map = {} # "name|level_id" → Dance.id
level_map = {} # level_name.lower() → DanceLevel.id
# ── Dans-niveauer ─────────────────────────────────────────────────────────
for lvl in db.query(DanceLevel).all():
level_map[lvl.name.lower()] = lvl.id
# ── Sange ─────────────────────────────────────────────────────────────────
# ── Sange (globale) ───────────────────────────────────────────────────────
for s in payload.songs:
if not s.title:
continue
# Match 1: MBID — sikrest
existing = None
if s.mbid:
existing = db.query(Song).filter_by(mbid=s.mbid).first()
# Match 2: titel+artist globalt
if not existing:
existing = db.query(Song).filter(
Song.title == s.title,
Song.artist == s.artist,
).first()
if existing:
song_id_map[s.local_id] = existing.id
# Opdater BPM og MBID hvis de mangler
if s.bpm and not existing.bpm:
existing.bpm = s.bpm
if s.mbid and not existing.mbid:
existing.mbid = s.mbid
if s.acoustid and not existing.acoustid:
existing.acoustid = s.acoustid
else:
song = Song(
owner_id=me.id,
title=s.title, artist=s.artist, album=s.album,
bpm=s.bpm, duration_sec=s.duration_sec,
file_format=s.file_format,
mbid=s.mbid or None,
acoustid=s.acoustid or None,
song = _find_or_create_song(
db, s.title, s.artist,
mbid=s.mbid, acoustid=s.acoustid,
album=s.album, bpm=s.bpm, duration_sec=s.duration_sec,
)
db.add(song)
db.flush()
song_id_map[s.local_id] = song.id
# ── Danse ─────────────────────────────────────────────────────────────────
# ── Danse ─────────────────────────────────────────────────────────────────
for d in payload.dances:
level_id = level_map.get(d.level_name.lower()) if d.level_name else None
key = f"{d.name.lower()}|{level_id}"
existing = db.query(Dance).filter_by(name=d.name, level_id=level_id).first()
if existing:
# Opdater info hvis den har ny data
if d.choreographer: existing.choreographer = d.choreographer
if d.video_url: existing.video_url = d.video_url
if d.stepsheet_url: existing.stepsheet_url = d.stepsheet_url
if d.notes: existing.notes = d.notes
dance_id_map[key] = existing.id
else:
dance = Dance(
name=d.name, level_id=level_id,
choreographer=d.choreographer, video_url=d.video_url,
stepsheet_url=d.stepsheet_url, notes=d.notes,
choreographer=d.choreographer,
video_url=d.video_url,
stepsheet_url=d.stepsheet_url,
notes=d.notes,
)
db.add(dance)
db.flush()
dance_id_map[key] = dance.id
# ── Sang-dans tags (brugerens egne) ───────────────────────────────────────
from app.models import SongDance, SongAltDance
# ── Sang-dans tags ────────────────────────────────────────────────────────
from app.models import SongDance, SongAltDance
import sqlalchemy as _sa
for sd in payload.song_dances:
song_id = song_id_map.get(sd.song_local_id)
if not song_id:
@@ -172,12 +198,8 @@ def push(
db.execute(_sa.text(
"INSERT IGNORE INTO song_dances (id, song_id, dance_id, dance_order) "
"VALUES (:id, :song_id, :dance_id, :dance_order)"
), {
"id": str(__import__("uuid").uuid4()),
"song_id": song_id,
"dance_id": dance_id,
"dance_order": sd.dance_order,
})
), {"id": str(uuid.uuid4()), "song_id": song_id,
"dance_id": dance_id, "dance_order": sd.dance_order})
for sa in payload.song_alts:
song_id = song_id_map.get(sa.song_local_id)
@@ -191,35 +213,27 @@ def push(
db.execute(_sa.text(
"INSERT IGNORE INTO song_alt_dances (id, song_id, dance_id, note) "
"VALUES (:id, :song_id, :dance_id, :note)"
), {
"id": str(__import__("uuid").uuid4()),
"song_id": song_id,
"dance_id": dance_id,
"note": sa.note or "",
})
), {"id": str(uuid.uuid4()), "song_id": song_id,
"dance_id": dance_id, "note": sa.note or ""})
# ── Playlister ────────────────────────────────────────────────────────────
# VIGTIGT: Match altid på local_id (= api_project_id på klienten),
# aldrig på navn — navn er ikke unikt og giver duplikater.
playlist_id_map = {}
for pl in payload.playlists:
# Prøv først at finde via server-ID (local_id er klientens lokale db-id
# som tidligere er returneret som server-ID via playlist_id_map)
# Find eksisterende via server-ID (local_id er api_project_id på klienten)
existing = None
if pl.local_id:
existing = db.query(Project).filter_by(
id=pl.local_id, owner_id=me.id
).first()
# Fallback: navn — kun hvis vi aldrig har set denne liste før
if not existing:
existing = db.query(Project).filter_by(
owner_id=me.id, name=pl.name
).first()
if existing:
existing.name = pl.name
existing.description = pl.description
existing.visibility = pl.visibility
# Opdater kun sange hvis push faktisk har sange med
if pl.songs:
db.query(ProjectSong).filter_by(project_id=existing.id).delete()
project = existing
@@ -233,27 +247,21 @@ def push(
playlist_id_map[pl.local_id] = project.id
for ps in pl.songs:
# Prøv først via song_id_map (lokal ID)
# Find sang via song_id_map eller titel+artist
song_id = song_id_map.get(ps.song_local_id)
# Fallback: match på titel+artist
if not song_id and ps.song_title:
existing_song = db.query(Song).filter_by(
title=ps.song_title, artist=ps.song_artist
).first()
if existing_song:
song_id = existing_song.id
song = _find_or_create_song(db, ps.song_title, ps.song_artist)
song_id = song.id
if not song_id:
continue
proj_song = ProjectSong(
db.add(ProjectSong(
project_id=project.id, song_id=song_id,
position=ps.position, status=ps.status,
is_workshop=ps.is_workshop,
dance_override=ps.dance_override,
)
db.add(proj_song)
))
# ── Slet playlister der er fjernet lokalt ─────────────────────────────────
# Klienten sender api_project_id (= server Project.id) som strings
# ── Slet playlister ───────────────────────────────────────────────────────
for project_id in payload.deleted_playlists:
proj = db.query(Project).filter_by(id=project_id, owner_id=me.id).first()
if proj:
@@ -266,6 +274,7 @@ def push(
"status": "ok",
"songs_synced": len(song_id_map),
"playlists_synced": len(playlist_id_map),
"song_id_map": {k: str(v) for k, v in song_id_map.items()},
"playlist_id_map": {k: str(v) for k, v in playlist_id_map.items()},
}
@@ -285,7 +294,7 @@ def pull(
for l in db.query(DanceLevel).order_by(DanceLevel.sort_order).all()
]
# Danse med info
# Danse
dances = [
{
"name": d.name,
@@ -299,85 +308,79 @@ def pull(
for d in db.query(Dance).order_by(Dance.use_count.desc()).limit(500).all()
]
# Community dans-tags (populære)
community = []
for cd in db.query(CommunityDance).limit(1000).all():
community.append({
"song_title": cd.song_title,
"song_artist": cd.song_artist,
"dance_id": cd.dance_id,
})
# Delte playlister (read-only — kun ejeren kan redigere)
shared_ids = set()
for s in db.query(PlaylistShare).filter(
# Delte playlister
shared_ids = {
s.project_id for s in db.query(PlaylistShare).filter(
(PlaylistShare.shared_with_id == me.id) |
(PlaylistShare.invited_email == me.email)
).all():
shared_ids.add(s.project_id)
).all()
}
shared = []
for p in db.query(Project).filter(Project.id.in_(shared_ids)).all():
if p.owner_id == me.id:
continue # Egne lister håndteres separat
owner = db.query(User).filter_by(id=p.owner_id).first()
songs_out = []
for ps in p.project_songs:
song = db.query(Song).filter_by(id=ps.song_id).first()
if not song:
continue
songs_out.append({
"title": song.title,
"artist": song.artist,
"position": ps.position,
"status": ps.status,
"is_workshop": ps.is_workshop,
"dance_override": ps.dance_override or "",
})
owner = db.query(User).filter_by(id=p.owner_id).first()
shared.append({
"server_id": p.id,
"name": p.name,
"owner": owner.username if owner else "?",
"songs": sorted(songs_out, key=lambda x: x["position"]),
})
# Egne playlister
my_playlists = []
all_projects = db.query(Project).filter_by(owner_id=me.id).all()
import logging
logging.getLogger(__name__).info(f"Pull: fandt {len(all_projects)} projekter for {me.id}")
for p in all_projects:
songs_out = []
for ps in p.project_songs:
song = db.query(Song).filter_by(id=ps.song_id).first()
if not song:
continue
songs_out.append({
"title": song.title,
"artist": song.artist,
"songs": [
{
"song_id": str(ps.song_id),
"title": ps.song.title,
"artist": ps.song.artist,
"mbid": ps.song.mbid or "",
"acoustid": ps.song.acoustid or "",
"bpm": ps.song.bpm,
"duration_sec": ps.song.duration_sec,
"position": ps.position,
"status": ps.status,
"is_workshop": ps.is_workshop,
"dance_override": ps.dance_override or "",
}
for ps in sorted(p.project_songs, key=lambda x: x.position)
if ps.song
],
})
# Egne playlister
my_playlists = []
for p in db.query(Project).filter_by(owner_id=me.id).all():
my_playlists.append({
"server_id": p.id,
"name": p.name,
"description": p.description or "",
"songs": sorted(songs_out, key=lambda x: x["position"]),
"songs": [
{
"song_id": str(ps.song_id),
"title": ps.song.title,
"artist": ps.song.artist,
"mbid": ps.song.mbid or "",
"acoustid": ps.song.acoustid or "",
"bpm": ps.song.bpm,
"duration_sec": ps.song.duration_sec,
"position": ps.position,
"status": ps.status,
"is_workshop": ps.is_workshop,
"dance_override": ps.dance_override or "",
}
for ps in sorted(p.project_songs, key=lambda x: x.position)
if ps.song
],
})
# Brugerens egne dans-tags
from app.models import SongDance, SongAltDance
logger.info(f"Pull: {len(my_playlists)} playlister for {me.username}")
# Dans-tags (brugerens egne)
song_tags = []
for sd in db.query(SongDance).join(Song).filter(Song.owner_id == me.id).all():
for sd in db.query(SongDance).all():
dance = db.query(Dance).filter_by(id=sd.dance_id).first()
if not dance:
continue
level = db.query(DanceLevel).filter_by(id=dance.level_id).first() if dance.level_id else None
song_tags.append({
"song_title": sd.song.title,
"song_artist": sd.song.artist,
"song_id": sd.song_id,
"dance_name": dance.name,
"level_name": level.name if level else "",
"dance_order": sd.dance_order,
@@ -386,7 +389,6 @@ def pull(
return {
"levels": levels,
"dances": dances,
"community": community,
"shared": shared,
"my_playlists": my_playlists,
"song_tags": song_tags,

File diff suppressed because it is too large Load Diff

View File

@@ -1,17 +1,11 @@
"""
scanner.py — Scanning af musikbiblioteker i baggrunden.
scanner.py — Scanning af musikbiblioteker i baggrunden. v0.9
Kører som en separat subprocess der scanner ét bibliotek ad gangen
og rapporterer fremgang via stdout JSON-linjer.
Kan også importeres direkte og bruges via ScanWorker QThread.
Skriver til files-tabellen og finder/opretter sange i songs-tabellen.
"""
import os
import sys
import json
import sqlite3
import uuid
import logging
import time
from pathlib import Path
logger = logging.getLogger(__name__)
@@ -19,8 +13,8 @@ logger = logging.getLogger(__name__)
SUPPORTED = {'.mp3', '.flac', '.m4a', '.ogg', '.wav', '.aiff', '.wma'}
def is_supported(path: Path) -> bool:
return path.suffix.lower() in SUPPORTED
def is_supported(path) -> bool:
    """Return True when *path* ends in one of the supported audio extensions."""
    extension = Path(path).suffix.lower()
    return extension in SUPPORTED
def get_file_mtime(path: Path) -> str:
@@ -32,29 +26,28 @@ def get_file_mtime(path: Path) -> str:
def scan_library(library_id: int, library_path: str, db_path: str,
overwrite_bpm: bool = False,
progress_callback=None):
progress_callback=None) -> int:
"""
Scan ét bibliotek og upsert sange til SQLite.
progress_callback(done, total, current_file) kaldes løbende.
Scan ét bibliotek og upsert til files + songs tabellerne.
Returnerer antal scannede filer.
"""
import sqlite3
from local.tag_reader import read_tags
conn = sqlite3.connect(db_path)
conn.row_factory = sqlite3.Row
from local.local_db import find_or_create_song, upsert_file
base = Path(library_path)
if not base.exists():
conn.close()
return 0
# Byg indeks over kendte filer
# Byg indeks over kendte filer (path → mtime)
conn = sqlite3.connect(db_path, timeout=10)
conn.row_factory = sqlite3.Row
conn.execute("PRAGMA journal_mode=WAL")
known = {}
for row in conn.execute(
"SELECT local_path, file_modified_at, file_missing FROM songs WHERE library_id=?",
(library_id,)
"SELECT local_path, file_modified_at FROM files WHERE file_missing=0"
).fetchall():
# Sange markeret som manglende medtages ikke i known — de skal altid genscanes
if not row["file_missing"]:
known[row["local_path"]] = row["file_modified_at"]
# Find alle musikfiler
@@ -68,8 +61,6 @@ def scan_library(library_id: int, library_path: str, db_path: str,
total = len(all_files)
done = 0
import time
for fp in all_files:
path_str = str(fp)
mtime = get_file_mtime(fp)
@@ -77,108 +68,55 @@ def scan_library(library_id: int, library_path: str, db_path: str,
if progress_callback:
progress_callback(done, total, fp.name)
# Spring over hvis ikke ændret
# Spring over uændrede filer
if path_str in known and known[path_str] == mtime:
done += 1
# Yield hvert 100. fil så andre tråde kan køre
if done % 100 == 0:
time.sleep(0.005)
continue
try:
tags = read_tags(fp)
extra = json.dumps(tags.get("extra_tags", {}), ensure_ascii=False)
# Match 0: MBID-match — sikrest mulige match
existing = None
mbid_from_file = tags.get("mbid", "")
if mbid_from_file:
existing = conn.execute(
"SELECT id, bpm FROM songs WHERE mbid=? LIMIT 1",
(mbid_from_file,)
).fetchone()
if existing:
conn.execute(
"UPDATE songs SET local_path=? WHERE id=?",
(path_str, existing["id"])
)
# Match 1: præcis sti-match
if not existing:
existing = conn.execute(
"SELECT id, bpm FROM songs WHERE local_path=?", (path_str,)
).fetchone()
# Match 2: titel+artist match — fil er flyttet eller var missing
if not existing:
title = tags.get("title", "")
tags = read_tags(str(fp))
title = tags.get("title", "") or fp.stem
artist = tags.get("artist", "")
if title:
# Prioritér file_missing=1 sange, men tag også sange med ugyldig sti
existing = conn.execute("""
SELECT id, bpm FROM songs
WHERE title=? AND artist=? AND file_missing=1
LIMIT 1
""", (title, artist)).fetchone()
if not existing:
# Tjek om der er en sang med samme titel+artist men ugyldig sti
existing = conn.execute("""
SELECT id, bpm, local_path FROM songs
WHERE title=? AND artist=? AND file_missing=0
LIMIT 1
""", (title, artist)).fetchone()
if existing:
from pathlib import Path as _Path
old_path = existing["local_path"] or ""
if old_path and not _Path(old_path).exists():
pass # Sti er ugyldig — brug dette match
else:
existing = None # Sti er valid — det er en anden fil
album = tags.get("album", "")
bpm = tags.get("bpm", 0)
mbid = tags.get("mbid", "")
acoustid = tags.get("acoustid", "")
duration_sec = tags.get("duration_sec", 0)
file_format = tags.get("file_format", fp.suffix.lstrip(".").lower())
extra_tags = tags.get("extra_tags", "{}")
if existing:
# Opdater stien så den peger på den nye placering
conn.execute(
"UPDATE songs SET local_path=? WHERE id=?",
(path_str, existing["id"])
# Find eller opret sang i global katalog
song_id = find_or_create_song(
title=title, artist=artist, album=album,
bpm=bpm, duration_sec=duration_sec,
mbid=mbid, acoustid=acoustid,
)
if existing:
bpm = tags.get("bpm", 0)
if not overwrite_bpm and existing["bpm"] and existing["bpm"] > 0:
bpm = existing["bpm"] # behold eksisterende BPM
mbid = tags.get("mbid", "")
conn.execute("""
UPDATE songs SET
library_id=?, title=?, artist=?, album=?,
bpm=?, duration_sec=?, file_format=?,
file_modified_at=?, file_missing=0, extra_tags=?,
mbid=CASE WHEN ? != '' THEN ? ELSE mbid END
WHERE id=?
""", (library_id, tags.get("title",""), tags.get("artist",""),
tags.get("album",""), bpm, tags.get("duration_sec",0),
tags.get("file_format",""), mtime, extra,
mbid, mbid, existing["id"]))
song_id = existing["id"]
else:
song_id = str(uuid.uuid4())
conn.execute("""
INSERT OR IGNORE INTO songs
(id, library_id, local_path, title, artist, album,
bpm, duration_sec, file_format, file_modified_at, extra_tags, mbid)
VALUES (?,?,?,?,?,?,?,?,?,?,?,?)
""", (song_id, library_id, path_str,
tags.get("title",""), tags.get("artist",""),
tags.get("album",""), tags.get("bpm",0),
tags.get("duration_sec",0), tags.get("file_format",""),
mtime, extra, tags.get("mbid","")))
# Opdater BPM på sangen hvis vi har bedre data
if bpm and bpm > 0:
conn.execute(
"UPDATE songs SET bpm=? WHERE id=? AND (bpm=0 OR bpm IS NULL)",
(bpm, song_id)
)
# Importer dans-tags fra filen hvis de ikke allerede er i DB
# Opret eller opdater fil-post
upsert_file(
song_id=song_id,
local_path=path_str,
file_format=file_format,
file_modified_at=mtime,
extra_tags=extra_tags,
)
# Dans-tags fra fil
file_dances = tags.get("dances", [])
if file_dances:
existing_dances = conn.execute(
existing_count = conn.execute(
"SELECT COUNT(*) FROM song_dances WHERE song_id=?", (song_id,)
).fetchone()[0]
if existing_dances == 0:
if existing_count == 0:
import uuid
for order, dance_name in enumerate(file_dances, start=1):
dance_row = conn.execute(
"SELECT id FROM dances WHERE name=? COLLATE NOCASE LIMIT 1",
@@ -192,64 +130,25 @@ def scan_library(library_id: int, library_path: str, db_path: str,
else:
dance_id = dance_row["id"]
conn.execute(
"INSERT OR IGNORE INTO song_dances (song_id, dance_id, dance_order) VALUES (?,?,?)",
(song_id, dance_id, order)
"INSERT OR IGNORE INTO song_dances (id, song_id, dance_id, dance_order) VALUES (?,?,?,?)",
(str(uuid.uuid4()), song_id, dance_id, order)
)
conn.commit()
except Exception as e:
# UNIQUE constraint er forventet og ufarlig — sang findes allerede
if "UNIQUE constraint" in str(e):
logger.debug(f"Sang allerede i DB: {fp.name}")
else:
logger.warning(f"Scan fejl {fp.name}: {e}")
done += 1
# Lille pause efter hver scannet fil så GUI ikke hænger
time.sleep(0.02)
# Marker manglende filer
for path_str in known:
if not Path(path_str).exists():
conn.execute(
"UPDATE songs SET file_missing=1 WHERE local_path=?", (path_str,)
)
conn.commit()
conn.execute(
"UPDATE libraries SET last_full_scan=datetime('now') WHERE id=?",
(library_id,)
"UPDATE files SET file_missing=1 WHERE local_path=?", (path_str,)
)
conn.commit()
conn.close()
logger.info(f"Scan færdig: {done} filer i {library_path}")
return done
# ── Subprocess entry point ─────────────────────────────────────────────────────
if __name__ == "__main__":
    """
    Run as a subprocess:
        python scanner.py <library_id> <library_path> <db_path>
    Reports JSON lines to stdout: {"done":N,"total":M,"file":"..."}
    """
    # Require the three positional arguments; exit silently otherwise
    # (the parent process treats a non-zero exit as a failed scan).
    if len(sys.argv) < 4:
        sys.exit(1)
    lib_id = int(sys.argv[1])
    lib_path = sys.argv[2]
    db_path = sys.argv[3]
    # Add the app directory to sys.path so local.tag_reader can be imported
    app_dir = str(Path(__file__).parent.parent)
    if app_dir not in sys.path:
        sys.path.insert(0, app_dir)
    def report(done, total, filename):
        # Progress callback: one JSON object per line, flushed immediately
        # so the parent process can read it in real time.
        print(json.dumps({"done": done, "total": total, "file": filename}),
              flush=True)
    count = scan_library(lib_id, lib_path, db_path,
                         progress_callback=report)
    # Final sentinel line signalling completion to the parent process.
    print(json.dumps({"done": count, "total": count, "finished": True}),
          flush=True)

View File

@@ -1,150 +1,122 @@
"""
sync_manager.py — Synkronisering mellem lokal SQLite og server API.
Kører i baggrundstråd — blokerer aldrig GUI.
sync_manager.py — Synkronisering mellem lokal database og server. v0.9
"""
import json
import logging
import sqlite3
import threading
import urllib.request
import urllib.error
import logging
from pathlib import Path
logger = logging.getLogger(__name__)
class SyncManager:
def __init__(self, db_path: str, server_url: str, token: str):
self._db_path = db_path
self._server_url = server_url.rstrip("/")
self._token = token
self._lock = threading.Lock()
def _headers(self):
return {
"Content-Type": "application/json",
"Authorization": f"Bearer {self._token}",
}
def __init__(self, api_url: str, db_path: str):
self._api_url = api_url.rstrip("/")
self._db_path = db_path
self._token: str | None = None
def set_token(self, token: str):
self._token = token
# ── HTTP ──────────────────────────────────────────────────────────────────
def _post(self, path: str, data: dict) -> dict:
body = json.dumps(data).encode("utf-8")
body = json.dumps(data).encode()
req = urllib.request.Request(
f"{self._server_url}{path}", data=body,
headers=self._headers(), method="POST"
f"{self._api_url}{path}",
data=body,
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {self._token}",
},
method="POST",
)
try:
with urllib.request.urlopen(req, timeout=30) as resp:
return json.loads(resp.read())
except urllib.error.HTTPError as e:
detail = e.read().decode("utf-8", errors="replace")
detail = e.read().decode()
raise Exception(f"HTTP {e.code}: {detail}")
def _get(self, path: str) -> dict:
req = urllib.request.Request(
f"{self._server_url}{path}",
headers=self._headers(), method="GET"
f"{self._api_url}{path}",
headers={"Authorization": f"Bearer {self._token}"},
)
try:
with urllib.request.urlopen(req, timeout=30) as resp:
return json.loads(resp.read())
except urllib.error.HTTPError as e:
detail = e.read().decode()
raise Exception(f"HTTP {e.code}: {detail}")
# ── Push ──────────────────────────────────────────────────────────────────
def push(self, on_done=None, on_error=None):
"""Push lokal data til server i baggrundstråd."""
def _run():
try:
payload = self._build_push_payload()
logger.info(f"Push OK: {len(payload['songs'])} sange")
logger.info(f"Push: {len(payload['songs'])} sange, "
f"{len(payload['playlists'])} playlister")
result = self._post("/sync/push", payload)
self._save_playlist_ids(result.get("playlist_id_map", {}))
# Fjern soft-slettede playlister permanent efter succesfuld push
if payload.get("deleted_playlists"):
conn = sqlite3.connect(self._db_path)
conn.execute(
"DELETE FROM playlists WHERE is_deleted=1 AND api_project_id IS NOT NULL"
self._save_server_ids(
result.get("song_id_map", {}),
result.get("playlist_id_map", {}),
)
conn.commit()
conn.close()
logger.info(f"Push OK: {result.get('songs_synced', '?')} sange synkroniseret")
logger.info(f"Push OK: {result.get('songs_synced','?')} sange synkroniseret")
if on_done:
on_done(result)
except Exception as e:
logger.error(f"Sync push fejl: {e}", exc_info=True)
logger.error(f"Push fejl: {e}", exc_info=True)
if on_error:
on_error(str(e))
threading.Thread(target=_run, daemon=True).start()
def _save_playlist_ids(self, id_map: dict) -> None:
    """Store server IDs (api_project_id) on the local playlists.

    ``id_map`` maps the client's local playlist id (stringified) to the
    server-side project id returned by the push endpoint.
    """
    if not id_map:
        return
    conn = sqlite3.connect(self._db_path)
    for local_id, server_id in id_map.items():
        try:
            conn.execute(
                "UPDATE playlists SET api_project_id=? WHERE id=?",
                (server_id, int(local_id))
            )
        except Exception:
            # Best-effort: a non-numeric local id (or a DB error on this
            # row) skips the entry instead of aborting the whole mapping.
            pass
    conn.commit()
    conn.close()
def pull(self, on_done=None, on_error=None) -> None:
    """Pull server data down in a background thread.

    Fetches /sync/pull, applies the result locally via ``_apply_pull``,
    then invokes ``on_done(result)``. On failure ``on_error(str)`` is
    called instead. Never blocks the caller (GUI thread).
    """
    def _run():
        try:
            result = self._get("/sync/pull")
            pl_count = len(result.get("my_playlists", []))
            logger.info(f"Pull OK: {pl_count} playlister")
            self._apply_pull(result)
            if on_done:
                on_done(result)
        except Exception as e:
            # Report the failure to the caller instead of raising inside
            # the worker thread (where it would be lost).
            logger.error(f"Sync pull fejl: {e}", exc_info=True)
            if on_error:
                on_error(str(e))
    # Daemon thread: does not keep the application alive on exit.
    threading.Thread(target=_run, daemon=True).start()
# ── Push + Pull ───────────────────────────────────────────────────────────
def push_and_pull(self, on_done=None, on_error=None):
"""Push FØR pull — så sletninger når serveren inden pull henter data ned."""
def _run():
try:
# 1. Push lokal data op — inkl. sletninger
# 1. Push
payload = self._build_push_payload()
deleted = payload.get("deleted_playlists", [])
logger.info(f"Sync push — {len(payload['songs'])} sange, "
f"{len(payload['playlists'])} playlister, "
f"sletter {len(deleted)}: {deleted}")
push_result = self._post("/sync/push", payload)
self._save_playlist_ids(push_result.get("playlist_id_map", {}))
self._save_server_ids(
push_result.get("song_id_map", {}),
push_result.get("playlist_id_map", {}),
)
logger.info(f"Push svar: status={push_result.get('status')}, "
f"sange={push_result.get('songs_synced', 0)}, "
f"playlister={push_result.get('playlists_synced', 0)}")
# 2. Pull — sletninger er nu gennemført på serveren.
# _apply_pull filtrerer is_deleted=1 rækker fra automatisk.
# 2. Pull
pull_result = self._get("/sync/pull")
pl_names = [p.get("name") for p in pull_result.get("my_playlists", [])]
logger.info(f"Pull modtog {len(pl_names)} playlister: {pl_names}")
self._apply_pull(pull_result)
# Fjern soft-slettede playlister permanent nu serveren er opdateret
# 3. Fjern soft-slettede permanent efter succesfuld sync
if deleted:
conn = sqlite3.connect(self._db_path)
conn = sqlite3.connect(self._db_path, timeout=10)
conn.execute("PRAGMA journal_mode=WAL")
conn.execute(
"DELETE FROM playlists WHERE is_deleted=1 AND api_project_id IS NOT NULL"
)
conn.commit()
conn.close()
logger.info(f"Soft-slettede playlister fjernet lokalt efter sync")
logger.info("Soft-slettede playlister fjernet lokalt efter sync")
pl_count = len(pull_result.get("my_playlists", []))
logger.info(
f"Sync OK — {len(payload['songs'])} sange, "
logger.info(f"Sync OK — {len(payload['songs'])} sange, "
f"{len(payload['playlists'])} playlister, "
f"{pl_count} server-playlister"
)
f"{pl_count} server-playlister")
if on_done:
on_done({"push": push_result, "pull": pull_result})
except Exception as e:
@@ -156,34 +128,36 @@ class SyncManager:
# ── Byg payload ───────────────────────────────────────────────────────────
def _build_push_payload(self) -> dict:
conn = sqlite3.connect(self._db_path)
conn = sqlite3.connect(self._db_path, timeout=10)
conn.row_factory = sqlite3.Row
conn.execute("PRAGMA journal_mode=WAL")
# Sange
# Sange (dem der har filer — altså kendes lokalt)
songs = []
for row in conn.execute(
"SELECT id, title, artist, album, bpm, duration_sec, file_format, mbid, acoustid "
"FROM songs WHERE file_missing=0"
).fetchall():
for row in conn.execute("""
SELECT DISTINCT s.id, s.title, s.artist, s.album,
s.bpm, s.duration_sec, s.mbid, s.acoustid, s.server_synced
FROM songs s
JOIN files f ON f.song_id = s.id AND f.file_missing = 0
""").fetchall():
songs.append({
"local_id": str(row["id"]),
"local_id": row["id"],
"title": row["title"] or "",
"artist": row["artist"] or "",
"album": row["album"] or "",
"bpm": row["bpm"] or 0,
"duration_sec": row["duration_sec"] or 0,
"file_format": row["file_format"] or "",
"mbid": row["mbid"] or "",
"acoustid": row["acoustid"] or "",
})
# Danse
dances = []
for row in conn.execute(
"SELECT d.name, dl.name as level_name, d.choreographer, "
"d.video_url, d.stepsheet_url, d.notes "
"FROM dances d LEFT JOIN dance_levels dl ON dl.id = d.level_id"
).fetchall():
for row in conn.execute("""
SELECT d.name, dl.name as level_name, d.choreographer,
d.video_url, d.stepsheet_url, d.notes
FROM dances d LEFT JOIN dance_levels dl ON dl.id = d.level_id
""").fetchall():
dances.append({
"name": row["name"] or "",
"level_name": row["level_name"] or "",
@@ -193,16 +167,17 @@ class SyncManager:
"notes": row["notes"] or "",
})
# Dans-tags per sang
# Dans-tags
song_dances = []
for row in conn.execute("""
SELECT sd.song_id, d.name as dance_name, dl.name as level_name, sd.dance_order
SELECT sd.song_id, d.name as dance_name,
dl.name as level_name, sd.dance_order
FROM song_dances sd
JOIN dances d ON d.id = sd.dance_id
LEFT JOIN dance_levels dl ON dl.id = d.level_id
""").fetchall():
song_dances.append({
"song_local_id": str(row["song_id"]),
"song_local_id": row["song_id"],
"dance_name": row["dance_name"],
"level_name": row["level_name"] or "",
"dance_order": row["dance_order"],
@@ -211,36 +186,36 @@ class SyncManager:
# Alternativ-danse
song_alts = []
for row in conn.execute("""
SELECT sad.song_id, d.name as dance_name, dl.name as level_name, sad.note
SELECT sad.song_id, d.name as dance_name,
dl.name as level_name, sad.note
FROM song_alt_dances sad
JOIN dances d ON d.id = sad.dance_id
LEFT JOIN dance_levels dl ON dl.id = d.level_id
""").fetchall():
song_alts.append({
"song_local_id": str(row["song_id"]),
"song_local_id": row["song_id"],
"dance_name": row["dance_name"],
"level_name": row["level_name"] or "",
"note": row["note"] or "",
})
# Playlister — send alle (nye og eksisterende) til serveren.
# Brug api_project_id som local_id hvis den kendes — så serveren
# kan matche på ID og ikke oprette duplikater.
# Playlister — alle ikke-slettede
playlists = []
for pl in conn.execute(
"SELECT id, name, description, tags, api_project_id FROM playlists "
"WHERE name != '__aktiv__' AND is_deleted = 0"
).fetchall():
for pl in conn.execute("""
SELECT id, name, description, tags, api_project_id
FROM playlists
WHERE name != '__aktiv__' AND is_deleted = 0
""").fetchall():
pl_songs = []
for ps in conn.execute("""
SELECT s.id, s.title, s.artist,
SELECT s.id as song_id, s.title, s.artist,
ps.position, ps.status, ps.is_workshop, ps.dance_override
FROM playlist_songs ps
JOIN songs s ON s.id = ps.song_id
WHERE ps.playlist_id=? ORDER BY ps.position
""", (pl["id"],)).fetchall():
pl_songs.append({
"song_local_id": str(ps["id"]),
"song_local_id": ps["song_id"],
"song_title": ps["title"] or "",
"song_artist": ps["artist"] or "",
"position": int(ps["position"] or 1),
@@ -248,9 +223,8 @@ class SyncManager:
"is_workshop": bool(ps["is_workshop"]),
"dance_override": ps["dance_override"] or "",
})
# Brug api_project_id som local_id hvis den kendes —
# serveren bruger dette til at finde eksisterende liste
local_id = pl["api_project_id"] or str(pl["id"])
# Brug api_project_id som local_id hvis kendt
local_id = pl["api_project_id"] or pl["id"]
playlists.append({
"local_id": local_id,
"name": pl["name"],
@@ -260,9 +234,7 @@ class SyncManager:
"songs": pl_songs,
})
# Slettede playlister — skal fjernes fra serveren.
# Serveren forventer en liste af strings (api_project_id).
# Kun playlister der faktisk er nået serveren (har api_project_id).
# Slettede playlister
deleted = [
row["api_project_id"]
for row in conn.execute(
@@ -281,21 +253,82 @@ class SyncManager:
"deleted_playlists": deleted,
}
# ── Gem server-IDs ────────────────────────────────────────────────────────
def _save_server_ids(self, song_id_map: dict, playlist_id_map: dict):
"""
Gem server-IDs lokalt.
song_id_map: lokal_song_id → server_song_id
playlist_id_map: lokal_pl_id → server_pl_id
"""
if not song_id_map and not playlist_id_map:
return
conn = sqlite3.connect(self._db_path, timeout=10)
conn.execute("PRAGMA journal_mode=WAL")
# Sange: hvis server gav et andet ID end det lokale, opdater
for local_id, server_id in song_id_map.items():
if local_id != server_id:
# Tjek om server-ID allerede eksisterer
existing = conn.execute(
"SELECT id FROM songs WHERE id=?", (server_id,)
).fetchone()
if not existing:
# Opdater lokal sang til server-ID
conn.execute(
"UPDATE songs SET id=?, server_synced=1 WHERE id=?",
(server_id, local_id)
)
# Opdater referencer
conn.execute(
"UPDATE files SET song_id=? WHERE song_id=?",
(server_id, local_id)
)
conn.execute(
"UPDATE playlist_songs SET song_id=? WHERE song_id=?",
(server_id, local_id)
)
conn.execute(
"UPDATE song_dances SET song_id=? WHERE song_id=?",
(server_id, local_id)
)
conn.execute(
"UPDATE song_alt_dances SET song_id=? WHERE song_id=?",
(server_id, local_id)
)
else:
conn.execute(
"UPDATE songs SET server_synced=1 WHERE id=?", (local_id,)
)
# Playlister
for local_id, server_id in playlist_id_map.items():
conn.execute(
"UPDATE playlists SET api_project_id=? WHERE id=? OR api_project_id=?",
(server_id, local_id, local_id)
)
conn.commit()
conn.close()
# ── Anvend pull ───────────────────────────────────────────────────────────
def _apply_pull(self, data: dict):
"""Gem server-data lokalt — opdaterer dans-info og importerer playlister."""
conn = sqlite3.connect(self._db_path)
"""Gem server-data lokalt."""
import uuid
conn = sqlite3.connect(self._db_path, timeout=10)
conn.row_factory = sqlite3.Row
conn.execute("PRAGMA journal_mode=WAL")
# Opdater dans-info fra server
try:
# Opdater dans-info
for d in data.get("dances", []):
if not d.get("name"):
continue
existing = conn.execute(
"SELECT id FROM dances WHERE name=? COLLATE NOCASE", (d["name"],)
).fetchone()
if existing and (d.get("choreographer") or d.get("video_url") or d.get("stepsheet_url")):
if existing and (d.get("choreographer") or d.get("video_url")):
conn.execute("""
UPDATE dances SET
choreographer = CASE WHEN choreographer='' THEN ? ELSE choreographer END,
@@ -305,8 +338,7 @@ class SyncManager:
""", (d.get("choreographer",""), d.get("video_url",""),
d.get("stepsheet_url",""), existing["id"]))
# Importer/opdater egne playlister fra server — server er sandhed
# Hent server-IDs på soft-slettede playlister så vi springer dem over
# Hent soft-slettede server-IDs så vi springer dem over
deleted_server_ids = {
row["api_project_id"]
for row in conn.execute(
@@ -315,13 +347,12 @@ class SyncManager:
).fetchall()
}
# Importer egne playlister
for pl in data.get("my_playlists", []):
server_id = pl.get("server_id")
name = pl.get("name", "")
if not server_id or not name:
continue
# Spring over hvis listen er soft-slettet lokalt
if server_id in deleted_server_ids:
continue
@@ -331,52 +362,59 @@ class SyncManager:
if existing:
pl_id = existing["id"]
# Opdater navn hvis det er ændret på serveren
conn.execute(
"UPDATE playlists SET name=? WHERE id=?", (name, pl_id)
)
else:
cur = conn.execute(
"INSERT INTO playlists (name, description, api_project_id, is_linked, server_permission) "
"VALUES (?,?,?,1,'edit')",
(name, pl.get("description",""), server_id)
pl_id = str(uuid.uuid4())
conn.execute(
"INSERT INTO playlists (id, name, description, api_project_id, is_linked, server_permission) "
"VALUES (?,?,?,?,1,'edit')",
(pl_id, name, pl.get("description",""), server_id)
)
pl_id = cur.lastrowid
# Genindlæs sange fra serveren — server er sandhed
# Genindlæs sange
conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,))
position = 1
for song_data in pl.get("songs", []):
songs_from_server = pl.get("songs", [])
logger.info(f"Pull: liste '{name}' har {len(songs_from_server)} sange")
for song_data in songs_from_server:
server_song_id = song_data.get("song_id", "")
title = song_data.get("title", "")
artist = song_data.get("artist", "")
if not title:
mbid = song_data.get("mbid", "")
acoustid = song_data.get("acoustid", "")
if not title and not server_song_id:
continue
local = conn.execute(
"SELECT id FROM songs WHERE title=? AND artist=? LIMIT 1",
(title, artist)
).fetchone()
if not local:
import uuid
new_id = str(uuid.uuid4())
conn.execute(
"INSERT OR IGNORE INTO songs (id, title, artist, file_missing) VALUES (?,?,?,1)",
(new_id, title, artist)
# Find eller opret sang lokalt
local_song_id = self._find_or_create_song_local(
conn, server_song_id, title, artist,
mbid=mbid, acoustid=acoustid,
bpm=song_data.get("bpm", 0),
duration_sec=song_data.get("duration_sec", 0),
)
local_id = new_id
else:
local_id = local["id"]
# Find tilgængelig fil til denne sang
file_row = conn.execute(
"SELECT id FROM files WHERE song_id=? AND file_missing=0 LIMIT 1",
(local_song_id,)
).fetchone()
file_id = file_row["id"] if file_row else None
conn.execute("""
INSERT OR IGNORE INTO playlist_songs
(playlist_id, song_id, position, status, is_workshop, dance_override)
VALUES (?,?,?,?,?,?)
""", (pl_id, local_id, position,
INSERT INTO playlist_songs
(id, playlist_id, song_id, file_id, position, status, is_workshop, dance_override)
VALUES (?,?,?,?,?,?,?,?)
""", (str(uuid.uuid4()), pl_id, local_song_id, file_id, position,
song_data.get("status","pending"),
1 if song_data.get("is_workshop") else 0,
song_data.get("dance_override","") or ""))
position += 1
# Importer delte playlister (read-only — is_linked=1, server_permission='view')
# Importer delte playlister
for pl in data.get("shared", []):
server_id = pl.get("server_id")
name = pl.get("name", "")
@@ -389,46 +427,98 @@ class SyncManager:
).fetchone()
if existing:
# Opdater sange fra server (ejer kan have ændret listen)
pl_id = existing["id"]
conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,))
else:
cur = conn.execute(
"INSERT INTO playlists (name, description, api_project_id, is_linked, server_permission) "
"VALUES (?,?,?,1,'view')",
(f"{name} ({owner})", "", server_id)
pl_id = str(uuid.uuid4())
conn.execute(
"INSERT INTO playlists (id, name, description, api_project_id, is_linked, server_permission) "
"VALUES (?,?,?,?,1,'view')",
(pl_id, f"{name} ({owner})", "", server_id)
)
pl_id = cur.lastrowid
position = 1
for song_data in pl.get("songs", []):
server_song_id = song_data.get("song_id", "")
title = song_data.get("title", "")
artist = song_data.get("artist", "")
if not title:
if not title and not server_song_id:
continue
local = conn.execute(
"SELECT id FROM songs WHERE title=? AND artist=? LIMIT 1",
(title, artist)
).fetchone()
if not local:
import uuid
new_id = str(uuid.uuid4())
conn.execute(
"INSERT OR IGNORE INTO songs (id, title, artist, file_missing) VALUES (?,?,?,1)",
(new_id, title, artist)
local_song_id = self._find_or_create_song_local(
conn, server_song_id, title, artist,
mbid=song_data.get("mbid", ""),
acoustid=song_data.get("acoustid", ""),
)
local_id = new_id
else:
local_id = local["id"]
file_row = conn.execute(
"SELECT id FROM files WHERE song_id=? AND file_missing=0 LIMIT 1",
(local_song_id,)
).fetchone()
file_id = file_row["id"] if file_row else None
conn.execute("""
INSERT OR IGNORE INTO playlist_songs
(playlist_id, song_id, position, status, is_workshop, dance_override)
VALUES (?,?,?,?,?,?)
""", (pl_id, local_id, position,
INSERT INTO playlist_songs
(id, playlist_id, song_id, file_id, position, status, is_workshop, dance_override)
VALUES (?,?,?,?,?,?,?,?)
""", (str(uuid.uuid4()), pl_id, local_song_id, file_id, position,
song_data.get("status","pending"),
1 if song_data.get("is_workshop") else 0,
song_data.get("dance_override","") or ""))
position += 1
conn.commit()
except Exception:
conn.rollback()
raise
finally:
conn.close()
def _find_or_create_song_local(self, conn, server_song_id: str, title: str,
artist: str = "", mbid: str = "",
acoustid: str = "", bpm: int = 0,
duration_sec: int = 0) -> str:
"""Find eller opret sang lokalt. Returnerer lokal song_id."""
import uuid
# Match på server-ID
if server_song_id:
row = conn.execute(
"SELECT id FROM songs WHERE id=?", (server_song_id,)
).fetchone()
if row:
return row["id"]
# Match på MBID
if mbid:
row = conn.execute(
"SELECT id FROM songs WHERE mbid=?", (mbid,)
).fetchone()
if row:
return row["id"]
# Match på AcoustID
if acoustid:
row = conn.execute(
"SELECT id FROM songs WHERE acoustid=?", (acoustid,)
).fetchone()
if row:
return row["id"]
# Match på titel + artist
if title:
row = conn.execute(
"SELECT id FROM songs WHERE title=? AND artist=?", (title, artist)
).fetchone()
if row:
return row["id"]
# Opret ny — brug server-ID hvis tilgængeligt
new_id = server_song_id or str(uuid.uuid4())
conn.execute(
"INSERT INTO songs (id, title, artist, bpm, duration_sec, mbid, acoustid, server_synced) "
"VALUES (?,?,?,?,?,?,?,1)",
(new_id, title, artist, bpm, duration_sec, mbid or None, acoustid or None)
)
logger.info(f"Pull: oprettet sang '{title}' ({new_id})")
return new_id

View File

@@ -406,9 +406,13 @@ class PlaylistPanel(QWidget):
return False
# Hent sange med status, workshop og dans-override
# JOIN songs — sangen er altid i songs tabellen (oprettet ved pull med file_missing=1)
# file_missing betyder bare at filen ikke er på denne maskine
songs_raw = conn.execute("""
SELECT s.*, ps.position, ps.status,
ps.is_workshop, ps.dance_override
SELECT s.id, s.title, s.artist, s.album,
s.bpm, s.duration_sec, s.file_format,
s.local_path, s.file_missing,
ps.position, ps.status, ps.is_workshop, ps.dance_override
FROM playlist_songs ps
JOIN songs s ON s.id = ps.song_id
WHERE ps.playlist_id=? ORDER BY ps.position
@@ -426,10 +430,10 @@ class PlaylistPanel(QWidget):
override = row["dance_override"] or ""
active_dance = override if override else (dance_names[0] if dance_names else "")
local_path = row["local_path"]
local_path = row["local_path"] or ""
file_missing = bool(row["file_missing"])
# Forsøg at finde sangen lokalt hvis den mangler
# Forsøg at finde filen lokalt hvis den mangler på denne maskine
if file_missing or not local_path:
match = conn.execute("""
SELECT local_path FROM songs
@@ -444,11 +448,11 @@ class PlaylistPanel(QWidget):
"id": row["id"],
"title": row["title"],
"artist": row["artist"],
"album": row["album"],
"bpm": row["bpm"],
"duration_sec": row["duration_sec"],
"album": row["album"] or "",
"bpm": row["bpm"] or 0,
"duration_sec": row["duration_sec"] or 0,
"local_path": local_path,
"file_format": row["file_format"],
"file_format": row["file_format"] or "",
"file_missing": file_missing,
"dances": dance_names,
"active_dance": active_dance,
@@ -605,9 +609,12 @@ class PlaylistPanel(QWidget):
else:
self._can_edit_server = False
with get_db() as conn:
# JOIN songs — sangen er altid i songs tabellen (oprettet ved pull med file_missing=1)
songs_raw = conn.execute("""
SELECT s.*, ps.position, ps.status,
ps.is_workshop, ps.dance_override
SELECT s.id, s.title, s.artist, s.album,
s.bpm, s.duration_sec, s.file_format,
s.local_path, s.file_missing,
ps.position, ps.status, ps.is_workshop, ps.dance_override
FROM playlist_songs ps
JOIN songs s ON s.id = ps.song_id
WHERE ps.playlist_id=? ORDER BY ps.position
@@ -618,17 +625,17 @@ class PlaylistPanel(QWidget):
for row in songs_raw:
dances = conn.execute("""
SELECT d.name FROM song_dances sd
JOIN dances d ON d.id = sd.dance_id
JOIN dances d ON d.id = sad.dance_id
WHERE sd.song_id=? ORDER BY sd.dance_order
""", (row["id"],)).fetchall()
dance_names = [d["name"] for d in dances]
override = row["dance_override"] or ""
active_dance = override if override else (dance_names[0] if dance_names else "")
local_path = row["local_path"]
local_path = row["local_path"] or ""
file_missing = bool(row["file_missing"])
# Forsøg at finde sangen lokalt hvis den mangler
# Forsøg at finde filen lokalt hvis den mangler på denne maskine
if file_missing or not local_path:
match = conn.execute("""
SELECT local_path FROM songs
@@ -644,11 +651,11 @@ class PlaylistPanel(QWidget):
"id": row["id"],
"title": row["title"],
"artist": row["artist"],
"album": row["album"],
"bpm": row["bpm"],
"duration_sec": row["duration_sec"],
"album": row["album"] or "",
"bpm": row["bpm"] or 0,
"duration_sec": row["duration_sec"] or 0,
"local_path": local_path,
"file_format": row["file_format"],
"file_format": row["file_format"] or "",
"file_missing": file_missing,
"dances": dance_names,
"active_dance": active_dance,