NY db struktur

This commit is contained in:
2026-04-19 23:45:59 +02:00
parent a9aa451d63
commit efc30cdbb2
6 changed files with 1056 additions and 1390 deletions

View File

@@ -28,29 +28,25 @@ class User(Base):
projects: Mapped[list["Project"]] = relationship("Project", back_populates="owner") projects: Mapped[list["Project"]] = relationship("Project", back_populates="owner")
memberships: Mapped[list["ProjectMember"]] = relationship("ProjectMember", back_populates="user") memberships: Mapped[list["ProjectMember"]] = relationship("ProjectMember", back_populates="user")
songs: Mapped[list["Song"]] = relationship("Song", back_populates="owner")
alt_ratings: Mapped[list["DanceAltRating"]] = relationship("DanceAltRating", back_populates="user") alt_ratings: Mapped[list["DanceAltRating"]] = relationship("DanceAltRating", back_populates="user")
playlist_shares: Mapped[list["PlaylistShare"]] = relationship("PlaylistShare", foreign_keys="PlaylistShare.shared_with_id", back_populates="shared_with") playlist_shares: Mapped[list["PlaylistShare"]] = relationship("PlaylistShare", foreign_keys="PlaylistShare.shared_with_id", back_populates="shared_with")
# ── Song ────────────────────────────────────────────────────────────────────── # ── Song (global — ikke knyttet til en bruger) ────────────────────────────────
class Song(Base): class Song(Base):
__tablename__ = "songs" __tablename__ = "songs"
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid)
owner_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False)
title: Mapped[str] = mapped_column(String(255), nullable=False) title: Mapped[str] = mapped_column(String(255), nullable=False)
artist: Mapped[str] = mapped_column(String(255), default="") artist: Mapped[str] = mapped_column(String(255), default="")
album: Mapped[str] = mapped_column(String(255), default="") album: Mapped[str] = mapped_column(String(255), default="")
bpm: Mapped[int] = mapped_column(Integer, default=0) bpm: Mapped[int] = mapped_column(Integer, default=0)
duration_sec: Mapped[int] = mapped_column(Integer, default=0) duration_sec: Mapped[int] = mapped_column(Integer, default=0)
file_format: Mapped[str] = mapped_column(String(8), default="") mbid: Mapped[str|None] = mapped_column(String(36), nullable=True, unique=True)
mbid: Mapped[str|None] = mapped_column(String(36), nullable=True)
acoustid: Mapped[str|None] = mapped_column(String(64), nullable=True) acoustid: Mapped[str|None] = mapped_column(String(64), nullable=True)
synced_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) synced_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
owner: Mapped["User"] = relationship("User", back_populates="songs")
project_songs: Mapped[list["ProjectSong"]] = relationship("ProjectSong", back_populates="song") project_songs: Mapped[list["ProjectSong"]] = relationship("ProjectSong", back_populates="song")
song_dances: Mapped[list["SongDance"]] = relationship("SongDance", back_populates="song", cascade="all, delete-orphan") song_dances: Mapped[list["SongDance"]] = relationship("SongDance", back_populates="song", cascade="all, delete-orphan")
song_alt_dances: Mapped[list["SongAltDance"]] = relationship("SongAltDance", back_populates="song", cascade="all, delete-orphan") song_alt_dances: Mapped[list["SongAltDance"]] = relationship("SongAltDance", back_populates="song", cascade="all, delete-orphan")
@@ -68,7 +64,6 @@ class DanceLevel(Base):
class Dance(Base): class Dance(Base):
"""Dans-entitet: navn + niveau er unik kombination."""
__tablename__ = "dances" __tablename__ = "dances"
__table_args__ = (UniqueConstraint("name", "level_id", name="uq_dance_name_level"),) __table_args__ = (UniqueConstraint("name", "level_id", name="uq_dance_name_level"),)
@@ -80,7 +75,6 @@ class Dance(Base):
stepsheet_url: Mapped[str] = mapped_column(String(512), default="") stepsheet_url: Mapped[str] = mapped_column(String(512), default="")
notes: Mapped[str] = mapped_column(Text, default="") notes: Mapped[str] = mapped_column(Text, default="")
use_count: Mapped[int] = mapped_column(Integer, default=1) use_count: Mapped[int] = mapped_column(Integer, default=1)
source: Mapped[str] = mapped_column(String(16), default="local")
synced_at: Mapped[datetime|None] = mapped_column(DateTime, nullable=True) synced_at: Mapped[datetime|None] = mapped_column(DateTime, nullable=True)
level: Mapped["DanceLevel|None"] = relationship("DanceLevel") level: Mapped["DanceLevel|None"] = relationship("DanceLevel")
@@ -95,7 +89,7 @@ class Project(Base):
owner_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False) owner_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False)
name: Mapped[str] = mapped_column(String(128), nullable=False) name: Mapped[str] = mapped_column(String(128), nullable=False)
description: Mapped[str] = mapped_column(Text, default="") description: Mapped[str] = mapped_column(Text, default="")
visibility: Mapped[str] = mapped_column(String(16), default="private") # private|shared|public visibility: Mapped[str] = mapped_column(String(16), default="private")
updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, onupdate=now_utc) updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, onupdate=now_utc)
created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
@@ -111,8 +105,8 @@ class ProjectMember(Base):
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid)
project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False) project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False)
user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False) user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False)
role: Mapped[str] = mapped_column(String(16), default="viewer") # owner|editor|viewer role: Mapped[str] = mapped_column(String(16), default="viewer")
status: Mapped[str] = mapped_column(String(16), default="pending") # pending|accepted status: Mapped[str] = mapped_column(String(16), default="pending")
invited_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) invited_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
project: Mapped["Project"] = relationship("Project", back_populates="members") project: Mapped["Project"] = relationship("Project", back_populates="members")
@@ -135,15 +129,14 @@ class ProjectSong(Base):
class PlaylistShare(Base): class PlaylistShare(Base):
"""Deling af en playlist med specifikke brugere."""
__tablename__ = "playlist_shares" __tablename__ = "playlist_shares"
__table_args__ = (UniqueConstraint("project_id", "shared_with_id", name="uq_share"),) __table_args__ = (UniqueConstraint("project_id", "shared_with_id", name="uq_share"),)
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid)
project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False) project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False)
shared_with_id: Mapped[str|None] = mapped_column(String(36), ForeignKey("users.id"), nullable=True) shared_with_id: Mapped[str|None] = mapped_column(String(36), ForeignKey("users.id"), nullable=True)
invited_email: Mapped[str] = mapped_column(String(255), default="") # til ikke-registrerede invited_email: Mapped[str] = mapped_column(String(255), default="")
permission: Mapped[str] = mapped_column(String(16), default="view") # view|copy|edit permission: Mapped[str] = mapped_column(String(16), default="view")
accepted_at: Mapped[datetime|None] = mapped_column(DateTime, nullable=True) accepted_at: Mapped[datetime|None] = mapped_column(DateTime, nullable=True)
created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
@@ -154,7 +147,6 @@ class PlaylistShare(Base):
# ── Sang-dans tags ──────────────────────────────────────────────────────────── # ── Sang-dans tags ────────────────────────────────────────────────────────────
class SongDance(Base): class SongDance(Base):
"""Dans-tags på en sang (brugerens egne tags)."""
__tablename__ = "song_dances" __tablename__ = "song_dances"
__table_args__ = (UniqueConstraint("song_id", "dance_id", name="uq_song_dance"),) __table_args__ = (UniqueConstraint("song_id", "dance_id", name="uq_song_dance"),)
@@ -168,7 +160,6 @@ class SongDance(Base):
class SongAltDance(Base): class SongAltDance(Base):
"""Alternativ-dans tags på en sang."""
__tablename__ = "song_alt_dances" __tablename__ = "song_alt_dances"
__table_args__ = (UniqueConstraint("song_id", "dance_id", name="uq_song_alt_dance"),) __table_args__ = (UniqueConstraint("song_id", "dance_id", name="uq_song_alt_dance"),)
@@ -184,7 +175,6 @@ class SongAltDance(Base):
# ── Community dans-tags ─────────────────────────────────────────────────────── # ── Community dans-tags ───────────────────────────────────────────────────────
class CommunityDance(Base): class CommunityDance(Base):
"""Fællesskabets dans-tags på sange."""
__tablename__ = "community_dances" __tablename__ = "community_dances"
__table_args__ = (UniqueConstraint("song_mbid", "song_title", "song_artist", "dance_id", name="uq_comm_dance"),) __table_args__ = (UniqueConstraint("song_mbid", "song_title", "song_artist", "dance_id", name="uq_comm_dance"),)
@@ -200,7 +190,6 @@ class CommunityDance(Base):
class CommunityDanceAlt(Base): class CommunityDanceAlt(Base):
"""Fællesskabets alternativ-danse til en sang med ratings."""
__tablename__ = "community_dance_alts" __tablename__ = "community_dance_alts"
__table_args__ = (UniqueConstraint("song_mbid", "song_title", "song_artist", "alt_dance_id", name="uq_comm_alt"),) __table_args__ = (UniqueConstraint("song_mbid", "song_title", "song_artist", "alt_dance_id", name="uq_comm_alt"),)
@@ -220,14 +209,13 @@ class CommunityDanceAlt(Base):
class DanceAltRating(Base): class DanceAltRating(Base):
"""1-5 stjerne rating af en alternativ-dans."""
__tablename__ = "dance_alt_ratings" __tablename__ = "dance_alt_ratings"
__table_args__ = (UniqueConstraint("alternative_id", "user_id", name="uq_rating"),) __table_args__ = (UniqueConstraint("alternative_id", "user_id", name="uq_rating"),)
id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid)
alternative_id: Mapped[str] = mapped_column(String(36), ForeignKey("community_dance_alts.id"), nullable=False) alternative_id: Mapped[str] = mapped_column(String(36), ForeignKey("community_dance_alts.id"), nullable=False)
user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False) user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False)
score: Mapped[int] = mapped_column(Integer, nullable=False) # 1-5 score: Mapped[int] = mapped_column(Integer, nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc)
alternative: Mapped["CommunityDanceAlt"] = relationship("CommunityDanceAlt", back_populates="ratings") alternative: Mapped["CommunityDanceAlt"] = relationship("CommunityDanceAlt", back_populates="ratings")

View File

@@ -4,20 +4,22 @@ sync.py — Push/pull synkronisering mellem lokal app og server.
POST /sync/push — send lokal data op til server POST /sync/push — send lokal data op til server
GET /sync/pull — hent server-data ned til app GET /sync/pull — hent server-data ned til app
""" """
import uuid
import logging
from datetime import datetime, timezone from datetime import datetime, timezone
from fastapi import APIRouter, Depends, HTTPException from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from pydantic import BaseModel from pydantic import BaseModel
from typing import Optional
from app.core.database import get_db from app.core.database import get_db
from app.core.security import get_current_user from app.core.security import get_current_user
from app.models import ( from app.models import (
User, Song, Dance, DanceLevel, Project, ProjectSong, User, Song, Dance, DanceLevel, Project, ProjectSong,
PlaylistShare, CommunityDance, CommunityDanceAlt, PlaylistShare, CommunityDance, SongDance, SongAltDance,
) )
router = APIRouter(prefix="/sync", tags=["sync"]) router = APIRouter(prefix="/sync", tags=["sync"])
logger = logging.getLogger(__name__)
# ── Schemas ─────────────────────────────────────────────────────────────────── # ── Schemas ───────────────────────────────────────────────────────────────────
@@ -29,7 +31,6 @@ class SongData(BaseModel):
album: str = "" album: str = ""
bpm: int = 0 bpm: int = 0
duration_sec: int = 0 duration_sec: int = 0
file_format: str = ""
mbid: str = "" mbid: str = ""
acoustid: str = "" acoustid: str = ""
@@ -76,7 +77,60 @@ class PushPayload(BaseModel):
song_dances: list[SongDanceData] = [] song_dances: list[SongDanceData] = []
song_alts: list[SongAltDanceData] = [] song_alts: list[SongAltDanceData] = []
playlists: list[PlaylistData] = [] playlists: list[PlaylistData] = []
deleted_playlists: list[str] = [] # server-IDs (api_project_id) på slettede playlister deleted_playlists: list[str] = [] # server-IDs (Project.id)
# ── Hjælpefunktion: find eller opret sang globalt ─────────────────────────────
def _find_or_create_song(db: Session, title: str, artist: str = "",
mbid: str = "", acoustid: str = "",
album: str = "", bpm: int = 0,
duration_sec: int = 0) -> Song:
"""
Match-hierarki:
1. MBID — sikreste
2. AcoustID
3. Titel + artist
4. Opret ny
"""
if mbid:
song = db.query(Song).filter_by(mbid=mbid).first()
if song:
return song
if acoustid:
song = db.query(Song).filter_by(acoustid=acoustid).first()
if song:
# Tilføj mbid hvis den mangler
if mbid and not song.mbid:
song.mbid = mbid
return song
if title:
song = db.query(Song).filter(
Song.title == title,
Song.artist == artist,
).first()
if song:
# Opdater med bedre data hvis tilgængeligt
if mbid and not song.mbid:
song.mbid = mbid
if acoustid and not song.acoustid:
song.acoustid = acoustid
if bpm and not song.bpm:
song.bpm = bpm
return song
# Opret ny global sang
song = Song(
title=title, artist=artist, album=album,
bpm=bpm, duration_sec=duration_sec,
mbid=mbid or None,
acoustid=acoustid or None,
)
db.add(song)
db.flush()
return song
# ── Push ────────────────────────────────────────────────────────────────────── # ── Push ──────────────────────────────────────────────────────────────────────
@@ -88,78 +142,50 @@ def push(
me: User = Depends(get_current_user), me: User = Depends(get_current_user),
): ):
"""Upload lokal data til server. Returnerer server-IDs.""" """Upload lokal data til server. Returnerer server-IDs."""
import sqlalchemy as _sa
song_id_map = {} # local_id → server Song.id song_id_map = {} # local_id → server Song.id
dance_id_map = {} # "name|level" → server Dance.id dance_id_map = {} # "name|level_id" → Dance.id
level_map = {} # level_name → DanceLevel.id level_map = {} # level_name.lower() → DanceLevel.id
# ── Dans-niveauer ───────────────────────────────────────────────────────── # ── Dans-niveauer ─────────────────────────────────────────────────────────
for lvl in db.query(DanceLevel).all(): for lvl in db.query(DanceLevel).all():
level_map[lvl.name.lower()] = lvl.id level_map[lvl.name.lower()] = lvl.id
# ── Sange ───────────────────────────────────────────────────────────────── # ── Sange (globale) ───────────────────────────────────────────────────────
for s in payload.songs: for s in payload.songs:
if not s.title: if not s.title:
continue continue
# Match 1: MBID — sikrest song = _find_or_create_song(
existing = None db, s.title, s.artist,
if s.mbid: mbid=s.mbid, acoustid=s.acoustid,
existing = db.query(Song).filter_by(mbid=s.mbid).first() album=s.album, bpm=s.bpm, duration_sec=s.duration_sec,
# Match 2: titel+artist globalt
if not existing:
existing = db.query(Song).filter(
Song.title == s.title,
Song.artist == s.artist,
).first()
if existing:
song_id_map[s.local_id] = existing.id
# Opdater BPM og MBID hvis de mangler
if s.bpm and not existing.bpm:
existing.bpm = s.bpm
if s.mbid and not existing.mbid:
existing.mbid = s.mbid
if s.acoustid and not existing.acoustid:
existing.acoustid = s.acoustid
else:
song = Song(
owner_id=me.id,
title=s.title, artist=s.artist, album=s.album,
bpm=s.bpm, duration_sec=s.duration_sec,
file_format=s.file_format,
mbid=s.mbid or None,
acoustid=s.acoustid or None,
) )
db.add(song)
db.flush()
song_id_map[s.local_id] = song.id song_id_map[s.local_id] = song.id
# ── Danse ───────────────────────────────────────────────────────────────── # ── Danse ─────────────────────────────────────────────────────────────────
for d in payload.dances: for d in payload.dances:
level_id = level_map.get(d.level_name.lower()) if d.level_name else None level_id = level_map.get(d.level_name.lower()) if d.level_name else None
key = f"{d.name.lower()}|{level_id}" key = f"{d.name.lower()}|{level_id}"
existing = db.query(Dance).filter_by(name=d.name, level_id=level_id).first() existing = db.query(Dance).filter_by(name=d.name, level_id=level_id).first()
if existing: if existing:
# Opdater info hvis den har ny data
if d.choreographer: existing.choreographer = d.choreographer if d.choreographer: existing.choreographer = d.choreographer
if d.video_url: existing.video_url = d.video_url if d.video_url: existing.video_url = d.video_url
if d.stepsheet_url: existing.stepsheet_url = d.stepsheet_url if d.stepsheet_url: existing.stepsheet_url = d.stepsheet_url
if d.notes: existing.notes = d.notes
dance_id_map[key] = existing.id dance_id_map[key] = existing.id
else: else:
dance = Dance( dance = Dance(
name=d.name, level_id=level_id, name=d.name, level_id=level_id,
choreographer=d.choreographer, video_url=d.video_url, choreographer=d.choreographer,
stepsheet_url=d.stepsheet_url, notes=d.notes, video_url=d.video_url,
stepsheet_url=d.stepsheet_url,
notes=d.notes,
) )
db.add(dance) db.add(dance)
db.flush() db.flush()
dance_id_map[key] = dance.id dance_id_map[key] = dance.id
# ── Sang-dans tags (brugerens egne) ───────────────────────────────────────
from app.models import SongDance, SongAltDance
# ── Sang-dans tags ──────────────────────────────────────────────────────── # ── Sang-dans tags ────────────────────────────────────────────────────────
from app.models import SongDance, SongAltDance
import sqlalchemy as _sa
for sd in payload.song_dances: for sd in payload.song_dances:
song_id = song_id_map.get(sd.song_local_id) song_id = song_id_map.get(sd.song_local_id)
if not song_id: if not song_id:
@@ -172,12 +198,8 @@ def push(
db.execute(_sa.text( db.execute(_sa.text(
"INSERT IGNORE INTO song_dances (id, song_id, dance_id, dance_order) " "INSERT IGNORE INTO song_dances (id, song_id, dance_id, dance_order) "
"VALUES (:id, :song_id, :dance_id, :dance_order)" "VALUES (:id, :song_id, :dance_id, :dance_order)"
), { ), {"id": str(uuid.uuid4()), "song_id": song_id,
"id": str(__import__("uuid").uuid4()), "dance_id": dance_id, "dance_order": sd.dance_order})
"song_id": song_id,
"dance_id": dance_id,
"dance_order": sd.dance_order,
})
for sa in payload.song_alts: for sa in payload.song_alts:
song_id = song_id_map.get(sa.song_local_id) song_id = song_id_map.get(sa.song_local_id)
@@ -191,35 +213,27 @@ def push(
db.execute(_sa.text( db.execute(_sa.text(
"INSERT IGNORE INTO song_alt_dances (id, song_id, dance_id, note) " "INSERT IGNORE INTO song_alt_dances (id, song_id, dance_id, note) "
"VALUES (:id, :song_id, :dance_id, :note)" "VALUES (:id, :song_id, :dance_id, :note)"
), { ), {"id": str(uuid.uuid4()), "song_id": song_id,
"id": str(__import__("uuid").uuid4()), "dance_id": dance_id, "note": sa.note or ""})
"song_id": song_id,
"dance_id": dance_id,
"note": sa.note or "",
})
# ── Playlister ──────────────────────────────────────────────────────────── # ── Playlister ────────────────────────────────────────────────────────────
# VIGTIGT: Match altid på local_id (= api_project_id på klienten),
# aldrig på navn — navn er ikke unikt og giver duplikater.
playlist_id_map = {} playlist_id_map = {}
for pl in payload.playlists: for pl in payload.playlists:
# Prøv først at finde via server-ID (local_id er klientens lokale db-id # Find eksisterende via server-ID (local_id er api_project_id på klienten)
# som tidligere er returneret som server-ID via playlist_id_map)
existing = None existing = None
if pl.local_id: if pl.local_id:
existing = db.query(Project).filter_by( existing = db.query(Project).filter_by(
id=pl.local_id, owner_id=me.id id=pl.local_id, owner_id=me.id
).first() ).first()
# Fallback: navn — kun hvis vi aldrig har set denne liste før
if not existing: if not existing:
existing = db.query(Project).filter_by( existing = db.query(Project).filter_by(
owner_id=me.id, name=pl.name owner_id=me.id, name=pl.name
).first() ).first()
if existing: if existing:
existing.name = pl.name existing.name = pl.name
existing.description = pl.description existing.description = pl.description
existing.visibility = pl.visibility existing.visibility = pl.visibility
# Opdater kun sange hvis push faktisk har sange med
if pl.songs: if pl.songs:
db.query(ProjectSong).filter_by(project_id=existing.id).delete() db.query(ProjectSong).filter_by(project_id=existing.id).delete()
project = existing project = existing
@@ -233,27 +247,21 @@ def push(
playlist_id_map[pl.local_id] = project.id playlist_id_map[pl.local_id] = project.id
for ps in pl.songs: for ps in pl.songs:
# Prøv først via song_id_map (lokal ID) # Find sang via song_id_map eller titel+artist
song_id = song_id_map.get(ps.song_local_id) song_id = song_id_map.get(ps.song_local_id)
# Fallback: match på titel+artist
if not song_id and ps.song_title: if not song_id and ps.song_title:
existing_song = db.query(Song).filter_by( song = _find_or_create_song(db, ps.song_title, ps.song_artist)
title=ps.song_title, artist=ps.song_artist song_id = song.id
).first()
if existing_song:
song_id = existing_song.id
if not song_id: if not song_id:
continue continue
proj_song = ProjectSong( db.add(ProjectSong(
project_id=project.id, song_id=song_id, project_id=project.id, song_id=song_id,
position=ps.position, status=ps.status, position=ps.position, status=ps.status,
is_workshop=ps.is_workshop, is_workshop=ps.is_workshop,
dance_override=ps.dance_override, dance_override=ps.dance_override,
) ))
db.add(proj_song)
# ── Slet playlister der er fjernet lokalt ───────────────────────────────── # ── Slet playlister ───────────────────────────────────────────────────────
# Klienten sender api_project_id (= server Project.id) som strings
for project_id in payload.deleted_playlists: for project_id in payload.deleted_playlists:
proj = db.query(Project).filter_by(id=project_id, owner_id=me.id).first() proj = db.query(Project).filter_by(id=project_id, owner_id=me.id).first()
if proj: if proj:
@@ -266,6 +274,7 @@ def push(
"status": "ok", "status": "ok",
"songs_synced": len(song_id_map), "songs_synced": len(song_id_map),
"playlists_synced": len(playlist_id_map), "playlists_synced": len(playlist_id_map),
"song_id_map": {k: str(v) for k, v in song_id_map.items()},
"playlist_id_map": {k: str(v) for k, v in playlist_id_map.items()}, "playlist_id_map": {k: str(v) for k, v in playlist_id_map.items()},
} }
@@ -285,7 +294,7 @@ def pull(
for l in db.query(DanceLevel).order_by(DanceLevel.sort_order).all() for l in db.query(DanceLevel).order_by(DanceLevel.sort_order).all()
] ]
# Danse med info # Danse
dances = [ dances = [
{ {
"name": d.name, "name": d.name,
@@ -299,85 +308,79 @@ def pull(
for d in db.query(Dance).order_by(Dance.use_count.desc()).limit(500).all() for d in db.query(Dance).order_by(Dance.use_count.desc()).limit(500).all()
] ]
# Community dans-tags (populære) # Delte playlister
community = [] shared_ids = {
for cd in db.query(CommunityDance).limit(1000).all(): s.project_id for s in db.query(PlaylistShare).filter(
community.append({
"song_title": cd.song_title,
"song_artist": cd.song_artist,
"dance_id": cd.dance_id,
})
# Delte playlister (read-only — kun ejeren kan redigere)
shared_ids = set()
for s in db.query(PlaylistShare).filter(
(PlaylistShare.shared_with_id == me.id) | (PlaylistShare.shared_with_id == me.id) |
(PlaylistShare.invited_email == me.email) (PlaylistShare.invited_email == me.email)
).all(): ).all()
shared_ids.add(s.project_id) }
shared = [] shared = []
for p in db.query(Project).filter(Project.id.in_(shared_ids)).all(): for p in db.query(Project).filter(Project.id.in_(shared_ids)).all():
if p.owner_id == me.id: if p.owner_id == me.id:
continue # Egne lister håndteres separat
owner = db.query(User).filter_by(id=p.owner_id).first()
songs_out = []
for ps in p.project_songs:
song = db.query(Song).filter_by(id=ps.song_id).first()
if not song:
continue continue
songs_out.append({ owner = db.query(User).filter_by(id=p.owner_id).first()
"title": song.title,
"artist": song.artist,
"position": ps.position,
"status": ps.status,
"is_workshop": ps.is_workshop,
"dance_override": ps.dance_override or "",
})
shared.append({ shared.append({
"server_id": p.id, "server_id": p.id,
"name": p.name, "name": p.name,
"owner": owner.username if owner else "?", "owner": owner.username if owner else "?",
"songs": sorted(songs_out, key=lambda x: x["position"]), "songs": [
}) {
"song_id": str(ps.song_id),
# Egne playlister "title": ps.song.title,
my_playlists = [] "artist": ps.song.artist,
all_projects = db.query(Project).filter_by(owner_id=me.id).all() "mbid": ps.song.mbid or "",
import logging "acoustid": ps.song.acoustid or "",
logging.getLogger(__name__).info(f"Pull: fandt {len(all_projects)} projekter for {me.id}") "bpm": ps.song.bpm,
for p in all_projects: "duration_sec": ps.song.duration_sec,
songs_out = []
for ps in p.project_songs:
song = db.query(Song).filter_by(id=ps.song_id).first()
if not song:
continue
songs_out.append({
"title": song.title,
"artist": song.artist,
"position": ps.position, "position": ps.position,
"status": ps.status, "status": ps.status,
"is_workshop": ps.is_workshop, "is_workshop": ps.is_workshop,
"dance_override": ps.dance_override or "", "dance_override": ps.dance_override or "",
}
for ps in sorted(p.project_songs, key=lambda x: x.position)
if ps.song
],
}) })
# Egne playlister
my_playlists = []
for p in db.query(Project).filter_by(owner_id=me.id).all():
my_playlists.append({ my_playlists.append({
"server_id": p.id, "server_id": p.id,
"name": p.name, "name": p.name,
"description": p.description or "", "description": p.description or "",
"songs": sorted(songs_out, key=lambda x: x["position"]), "songs": [
{
"song_id": str(ps.song_id),
"title": ps.song.title,
"artist": ps.song.artist,
"mbid": ps.song.mbid or "",
"acoustid": ps.song.acoustid or "",
"bpm": ps.song.bpm,
"duration_sec": ps.song.duration_sec,
"position": ps.position,
"status": ps.status,
"is_workshop": ps.is_workshop,
"dance_override": ps.dance_override or "",
}
for ps in sorted(p.project_songs, key=lambda x: x.position)
if ps.song
],
}) })
# Brugerens egne dans-tags logger.info(f"Pull: {len(my_playlists)} playlister for {me.username}")
from app.models import SongDance, SongAltDance
# Dans-tags (brugerens egne)
song_tags = [] song_tags = []
for sd in db.query(SongDance).join(Song).filter(Song.owner_id == me.id).all(): for sd in db.query(SongDance).all():
dance = db.query(Dance).filter_by(id=sd.dance_id).first() dance = db.query(Dance).filter_by(id=sd.dance_id).first()
if not dance: if not dance:
continue continue
level = db.query(DanceLevel).filter_by(id=dance.level_id).first() if dance.level_id else None level = db.query(DanceLevel).filter_by(id=dance.level_id).first() if dance.level_id else None
song_tags.append({ song_tags.append({
"song_title": sd.song.title, "song_id": sd.song_id,
"song_artist": sd.song.artist,
"dance_name": dance.name, "dance_name": dance.name,
"level_name": level.name if level else "", "level_name": level.name if level else "",
"dance_order": sd.dance_order, "dance_order": sd.dance_order,
@@ -386,7 +389,6 @@ def pull(
return { return {
"levels": levels, "levels": levels,
"dances": dances, "dances": dances,
"community": community,
"shared": shared, "shared": shared,
"my_playlists": my_playlists, "my_playlists": my_playlists,
"song_tags": song_tags, "song_tags": song_tags,

File diff suppressed because it is too large Load Diff

View File

@@ -1,17 +1,11 @@
""" """
scanner.py — Scanning af musikbiblioteker i baggrunden. scanner.py — Scanning af musikbiblioteker i baggrunden. v0.9
Kører som en separat subprocess der scanner ét bibliotek ad gangen Skriver til files-tabellen og finder/opretter sange i songs-tabellen.
og rapporterer fremgang via stdout JSON-linjer.
Kan også importeres direkte og bruges via ScanWorker QThread.
""" """
import os import os
import sys
import json
import sqlite3
import uuid
import logging import logging
import time
from pathlib import Path from pathlib import Path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -19,8 +13,8 @@ logger = logging.getLogger(__name__)
SUPPORTED = {'.mp3', '.flac', '.m4a', '.ogg', '.wav', '.aiff', '.wma'} SUPPORTED = {'.mp3', '.flac', '.m4a', '.ogg', '.wav', '.aiff', '.wma'}
def is_supported(path: Path) -> bool: def is_supported(path) -> bool:
return path.suffix.lower() in SUPPORTED return Path(path).suffix.lower() in SUPPORTED
def get_file_mtime(path: Path) -> str: def get_file_mtime(path: Path) -> str:
@@ -32,29 +26,28 @@ def get_file_mtime(path: Path) -> str:
def scan_library(library_id: int, library_path: str, db_path: str, def scan_library(library_id: int, library_path: str, db_path: str,
overwrite_bpm: bool = False, overwrite_bpm: bool = False,
progress_callback=None): progress_callback=None) -> int:
""" """
Scan ét bibliotek og upsert sange til SQLite. Scan ét bibliotek og upsert til files + songs tabellerne.
progress_callback(done, total, current_file) kaldes løbende. Returnerer antal scannede filer.
""" """
import sqlite3
from local.tag_reader import read_tags from local.tag_reader import read_tags
from local.local_db import find_or_create_song, upsert_file
conn = sqlite3.connect(db_path)
conn.row_factory = sqlite3.Row
base = Path(library_path) base = Path(library_path)
if not base.exists(): if not base.exists():
conn.close()
return 0 return 0
# Byg indeks over kendte filer # Byg indeks over kendte filer (path → mtime)
conn = sqlite3.connect(db_path, timeout=10)
conn.row_factory = sqlite3.Row
conn.execute("PRAGMA journal_mode=WAL")
known = {} known = {}
for row in conn.execute( for row in conn.execute(
"SELECT local_path, file_modified_at, file_missing FROM songs WHERE library_id=?", "SELECT local_path, file_modified_at FROM files WHERE file_missing=0"
(library_id,)
).fetchall(): ).fetchall():
# Sange markeret som manglende medtages ikke i known — de skal altid genscanes
if not row["file_missing"]:
known[row["local_path"]] = row["file_modified_at"] known[row["local_path"]] = row["file_modified_at"]
# Find alle musikfiler # Find alle musikfiler
@@ -68,8 +61,6 @@ def scan_library(library_id: int, library_path: str, db_path: str,
total = len(all_files) total = len(all_files)
done = 0 done = 0
import time
for fp in all_files: for fp in all_files:
path_str = str(fp) path_str = str(fp)
mtime = get_file_mtime(fp) mtime = get_file_mtime(fp)
@@ -77,108 +68,55 @@ def scan_library(library_id: int, library_path: str, db_path: str,
if progress_callback: if progress_callback:
progress_callback(done, total, fp.name) progress_callback(done, total, fp.name)
# Spring over hvis ikke ændret # Spring over uændrede filer
if path_str in known and known[path_str] == mtime: if path_str in known and known[path_str] == mtime:
done += 1 done += 1
# Yield hvert 100. fil så andre tråde kan køre
if done % 100 == 0:
time.sleep(0.005) time.sleep(0.005)
continue continue
try: try:
tags = read_tags(fp) tags = read_tags(str(fp))
extra = json.dumps(tags.get("extra_tags", {}), ensure_ascii=False) title = tags.get("title", "") or fp.stem
# Match 0: MBID-match — sikrest mulige match
existing = None
mbid_from_file = tags.get("mbid", "")
if mbid_from_file:
existing = conn.execute(
"SELECT id, bpm FROM songs WHERE mbid=? LIMIT 1",
(mbid_from_file,)
).fetchone()
if existing:
conn.execute(
"UPDATE songs SET local_path=? WHERE id=?",
(path_str, existing["id"])
)
# Match 1: præcis sti-match
if not existing:
existing = conn.execute(
"SELECT id, bpm FROM songs WHERE local_path=?", (path_str,)
).fetchone()
# Match 2: titel+artist match — fil er flyttet eller var missing
if not existing:
title = tags.get("title", "")
artist = tags.get("artist", "") artist = tags.get("artist", "")
if title: album = tags.get("album", "")
# Prioritér file_missing=1 sange, men tag også sange med ugyldig sti bpm = tags.get("bpm", 0)
existing = conn.execute(""" mbid = tags.get("mbid", "")
SELECT id, bpm FROM songs acoustid = tags.get("acoustid", "")
WHERE title=? AND artist=? AND file_missing=1 duration_sec = tags.get("duration_sec", 0)
LIMIT 1 file_format = tags.get("file_format", fp.suffix.lstrip(".").lower())
""", (title, artist)).fetchone() extra_tags = tags.get("extra_tags", "{}")
if not existing:
# Tjek om der er en sang med samme titel+artist men ugyldig sti
existing = conn.execute("""
SELECT id, bpm, local_path FROM songs
WHERE title=? AND artist=? AND file_missing=0
LIMIT 1
""", (title, artist)).fetchone()
if existing:
from pathlib import Path as _Path
old_path = existing["local_path"] or ""
if old_path and not _Path(old_path).exists():
pass # Sti er ugyldig — brug dette match
else:
existing = None # Sti er valid — det er en anden fil
if existing: # Find eller opret sang i global katalog
# Opdater stien så den peger på den nye placering song_id = find_or_create_song(
conn.execute( title=title, artist=artist, album=album,
"UPDATE songs SET local_path=? WHERE id=?", bpm=bpm, duration_sec=duration_sec,
(path_str, existing["id"]) mbid=mbid, acoustid=acoustid,
) )
if existing: # Opdater BPM på sangen hvis vi har bedre data
bpm = tags.get("bpm", 0) if bpm and bpm > 0:
if not overwrite_bpm and existing["bpm"] and existing["bpm"] > 0: conn.execute(
bpm = existing["bpm"] # behold eksisterende BPM "UPDATE songs SET bpm=? WHERE id=? AND (bpm=0 OR bpm IS NULL)",
mbid = tags.get("mbid", "") (bpm, song_id)
conn.execute(""" )
UPDATE songs SET
library_id=?, title=?, artist=?, album=?,
bpm=?, duration_sec=?, file_format=?,
file_modified_at=?, file_missing=0, extra_tags=?,
mbid=CASE WHEN ? != '' THEN ? ELSE mbid END
WHERE id=?
""", (library_id, tags.get("title",""), tags.get("artist",""),
tags.get("album",""), bpm, tags.get("duration_sec",0),
tags.get("file_format",""), mtime, extra,
mbid, mbid, existing["id"]))
song_id = existing["id"]
else:
song_id = str(uuid.uuid4())
conn.execute("""
INSERT OR IGNORE INTO songs
(id, library_id, local_path, title, artist, album,
bpm, duration_sec, file_format, file_modified_at, extra_tags, mbid)
VALUES (?,?,?,?,?,?,?,?,?,?,?,?)
""", (song_id, library_id, path_str,
tags.get("title",""), tags.get("artist",""),
tags.get("album",""), tags.get("bpm",0),
tags.get("duration_sec",0), tags.get("file_format",""),
mtime, extra, tags.get("mbid","")))
# Importer dans-tags fra filen hvis de ikke allerede er i DB # Opret eller opdater fil-post
upsert_file(
song_id=song_id,
local_path=path_str,
file_format=file_format,
file_modified_at=mtime,
extra_tags=extra_tags,
)
# Dans-tags fra fil
file_dances = tags.get("dances", []) file_dances = tags.get("dances", [])
if file_dances: if file_dances:
existing_dances = conn.execute( existing_count = conn.execute(
"SELECT COUNT(*) FROM song_dances WHERE song_id=?", (song_id,) "SELECT COUNT(*) FROM song_dances WHERE song_id=?", (song_id,)
).fetchone()[0] ).fetchone()[0]
if existing_dances == 0: if existing_count == 0:
import uuid
for order, dance_name in enumerate(file_dances, start=1): for order, dance_name in enumerate(file_dances, start=1):
dance_row = conn.execute( dance_row = conn.execute(
"SELECT id FROM dances WHERE name=? COLLATE NOCASE LIMIT 1", "SELECT id FROM dances WHERE name=? COLLATE NOCASE LIMIT 1",
@@ -192,64 +130,25 @@ def scan_library(library_id: int, library_path: str, db_path: str,
else: else:
dance_id = dance_row["id"] dance_id = dance_row["id"]
conn.execute( conn.execute(
"INSERT OR IGNORE INTO song_dances (song_id, dance_id, dance_order) VALUES (?,?,?)", "INSERT OR IGNORE INTO song_dances (id, song_id, dance_id, dance_order) VALUES (?,?,?,?)",
(song_id, dance_id, order) (str(uuid.uuid4()), song_id, dance_id, order)
) )
conn.commit() conn.commit()
except Exception as e: except Exception as e:
# UNIQUE constraint er forventet og ufarlig — sang findes allerede
if "UNIQUE constraint" in str(e):
logger.debug(f"Sang allerede i DB: {fp.name}")
else:
logger.warning(f"Scan fejl {fp.name}: {e}") logger.warning(f"Scan fejl {fp.name}: {e}")
done += 1 done += 1
# Lille pause efter hver scannet fil så GUI ikke hænger
time.sleep(0.02) time.sleep(0.02)
# Marker manglende filer # Marker manglende filer
for path_str in known: for path_str in known:
if not Path(path_str).exists(): if not Path(path_str).exists():
conn.execute( conn.execute(
"UPDATE songs SET file_missing=1 WHERE local_path=?", (path_str,) "UPDATE files SET file_missing=1 WHERE local_path=?", (path_str,)
)
conn.commit()
conn.execute(
"UPDATE libraries SET last_full_scan=datetime('now') WHERE id=?",
(library_id,)
) )
conn.commit() conn.commit()
conn.close() conn.close()
logger.info(f"Scan færdig: {done} filer i {library_path}")
return done return done
# ── Subprocess entry point ─────────────────────────────────────────────────────
if __name__ == "__main__":
"""
Kørsel som subprocess:
python scanner.py <library_id> <library_path> <db_path>
Rapporterer JSON-linjer til stdout: {"done":N,"total":M,"file":"..."}
"""
if len(sys.argv) < 4:
sys.exit(1)
lib_id = int(sys.argv[1])
lib_path = sys.argv[2]
db_path = sys.argv[3]
# Tilføj app-mappen til path så local.tag_reader kan importeres
app_dir = str(Path(__file__).parent.parent)
if app_dir not in sys.path:
sys.path.insert(0, app_dir)
def report(done, total, filename):
print(json.dumps({"done": done, "total": total, "file": filename}),
flush=True)
count = scan_library(lib_id, lib_path, db_path,
progress_callback=report)
print(json.dumps({"done": count, "total": count, "finished": True}),
flush=True)

View File

@@ -1,150 +1,122 @@
""" """
sync_manager.py — Synkronisering mellem lokal SQLite og server API. sync_manager.py — Synkronisering mellem lokal database og server. v0.9
Kører i baggrundstråd — blokerer aldrig GUI.
""" """
import json import json
import logging
import sqlite3 import sqlite3
import threading import threading
import urllib.request import urllib.request
import urllib.error import urllib.error
import logging
from pathlib import Path from pathlib import Path
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class SyncManager: class SyncManager:
def __init__(self, db_path: str, server_url: str, token: str):
self._db_path = db_path
self._server_url = server_url.rstrip("/")
self._token = token
self._lock = threading.Lock()
def _headers(self): def __init__(self, api_url: str, db_path: str):
return { self._api_url = api_url.rstrip("/")
"Content-Type": "application/json", self._db_path = db_path
"Authorization": f"Bearer {self._token}", self._token: str | None = None
}
def set_token(self, token: str):
self._token = token
# ── HTTP ──────────────────────────────────────────────────────────────────
def _post(self, path: str, data: dict) -> dict: def _post(self, path: str, data: dict) -> dict:
body = json.dumps(data).encode("utf-8") body = json.dumps(data).encode()
req = urllib.request.Request( req = urllib.request.Request(
f"{self._server_url}{path}", data=body, f"{self._api_url}{path}",
headers=self._headers(), method="POST" data=body,
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {self._token}",
},
method="POST",
) )
try: try:
with urllib.request.urlopen(req, timeout=30) as resp: with urllib.request.urlopen(req, timeout=30) as resp:
return json.loads(resp.read()) return json.loads(resp.read())
except urllib.error.HTTPError as e: except urllib.error.HTTPError as e:
detail = e.read().decode("utf-8", errors="replace") detail = e.read().decode()
raise Exception(f"HTTP {e.code}: {detail}") raise Exception(f"HTTP {e.code}: {detail}")
def _get(self, path: str) -> dict: def _get(self, path: str) -> dict:
req = urllib.request.Request( req = urllib.request.Request(
f"{self._server_url}{path}", f"{self._api_url}{path}",
headers=self._headers(), method="GET" headers={"Authorization": f"Bearer {self._token}"},
) )
try:
with urllib.request.urlopen(req, timeout=30) as resp: with urllib.request.urlopen(req, timeout=30) as resp:
return json.loads(resp.read()) return json.loads(resp.read())
except urllib.error.HTTPError as e:
detail = e.read().decode()
raise Exception(f"HTTP {e.code}: {detail}")
# ── Push ────────────────────────────────────────────────────────────────── # ── Push ──────────────────────────────────────────────────────────────────
def push(self, on_done=None, on_error=None): def push(self, on_done=None, on_error=None):
"""Push lokal data til server i baggrundstråd."""
def _run(): def _run():
try: try:
payload = self._build_push_payload() payload = self._build_push_payload()
logger.info(f"Push OK: {len(payload['songs'])} sange") logger.info(f"Push: {len(payload['songs'])} sange, "
f"{len(payload['playlists'])} playlister")
result = self._post("/sync/push", payload) result = self._post("/sync/push", payload)
self._save_playlist_ids(result.get("playlist_id_map", {})) self._save_server_ids(
# Fjern soft-slettede playlister permanent efter succesfuld push result.get("song_id_map", {}),
if payload.get("deleted_playlists"): result.get("playlist_id_map", {}),
conn = sqlite3.connect(self._db_path)
conn.execute(
"DELETE FROM playlists WHERE is_deleted=1 AND api_project_id IS NOT NULL"
) )
conn.commit() logger.info(f"Push OK: {result.get('songs_synced','?')} sange synkroniseret")
conn.close()
logger.info(f"Push OK: {result.get('songs_synced', '?')} sange synkroniseret")
if on_done: if on_done:
on_done(result) on_done(result)
except Exception as e: except Exception as e:
logger.error(f"Sync push fejl: {e}", exc_info=True) logger.error(f"Push fejl: {e}", exc_info=True)
if on_error: if on_error:
on_error(str(e)) on_error(str(e))
threading.Thread(target=_run, daemon=True).start() threading.Thread(target=_run, daemon=True).start()
def _save_playlist_ids(self, id_map: dict): # ── Push + Pull ───────────────────────────────────────────────────────────
"""Gem server-IDs (api_project_id) på lokale playlister."""
if not id_map:
return
conn = sqlite3.connect(self._db_path)
for local_id, server_id in id_map.items():
try:
conn.execute(
"UPDATE playlists SET api_project_id=? WHERE id=?",
(server_id, int(local_id))
)
except Exception:
pass
conn.commit()
conn.close()
def pull(self, on_done=None, on_error=None):
"""Pull server-data ned i baggrundstråd."""
def _run():
try:
result = self._get("/sync/pull")
pl_count = len(result.get("my_playlists", []))
logger.info(f"Pull OK: {pl_count} playlister")
self._apply_pull(result)
if on_done:
on_done(result)
except Exception as e:
logger.error(f"Sync pull fejl: {e}", exc_info=True)
if on_error:
on_error(str(e))
threading.Thread(target=_run, daemon=True).start()
def push_and_pull(self, on_done=None, on_error=None): def push_and_pull(self, on_done=None, on_error=None):
"""Push FØR pull — så sletninger når serveren inden pull henter data ned."""
def _run(): def _run():
try: try:
# 1. Push lokal data op — inkl. sletninger # 1. Push
payload = self._build_push_payload() payload = self._build_push_payload()
deleted = payload.get("deleted_playlists", []) deleted = payload.get("deleted_playlists", [])
logger.info(f"Sync push — {len(payload['songs'])} sange, " logger.info(f"Sync push — {len(payload['songs'])} sange, "
f"{len(payload['playlists'])} playlister, " f"{len(payload['playlists'])} playlister, "
f"sletter {len(deleted)}: {deleted}") f"sletter {len(deleted)}: {deleted}")
push_result = self._post("/sync/push", payload) push_result = self._post("/sync/push", payload)
self._save_playlist_ids(push_result.get("playlist_id_map", {})) self._save_server_ids(
push_result.get("song_id_map", {}),
push_result.get("playlist_id_map", {}),
)
logger.info(f"Push svar: status={push_result.get('status')}, " logger.info(f"Push svar: status={push_result.get('status')}, "
f"sange={push_result.get('songs_synced', 0)}, " f"sange={push_result.get('songs_synced', 0)}, "
f"playlister={push_result.get('playlists_synced', 0)}") f"playlister={push_result.get('playlists_synced', 0)}")
# 2. Pull — sletninger er nu gennemført på serveren. # 2. Pull
# _apply_pull filtrerer is_deleted=1 rækker fra automatisk.
pull_result = self._get("/sync/pull") pull_result = self._get("/sync/pull")
pl_names = [p.get("name") for p in pull_result.get("my_playlists", [])] pl_names = [p.get("name") for p in pull_result.get("my_playlists", [])]
logger.info(f"Pull modtog {len(pl_names)} playlister: {pl_names}") logger.info(f"Pull modtog {len(pl_names)} playlister: {pl_names}")
self._apply_pull(pull_result) self._apply_pull(pull_result)
# Fjern soft-slettede playlister permanent nu serveren er opdateret # 3. Fjern soft-slettede permanent efter succesfuld sync
if deleted: if deleted:
conn = sqlite3.connect(self._db_path) conn = sqlite3.connect(self._db_path, timeout=10)
conn.execute("PRAGMA journal_mode=WAL")
conn.execute( conn.execute(
"DELETE FROM playlists WHERE is_deleted=1 AND api_project_id IS NOT NULL" "DELETE FROM playlists WHERE is_deleted=1 AND api_project_id IS NOT NULL"
) )
conn.commit() conn.commit()
conn.close() conn.close()
logger.info(f"Soft-slettede playlister fjernet lokalt efter sync") logger.info("Soft-slettede playlister fjernet lokalt efter sync")
pl_count = len(pull_result.get("my_playlists", [])) pl_count = len(pull_result.get("my_playlists", []))
logger.info( logger.info(f"Sync OK — {len(payload['songs'])} sange, "
f"Sync OK — {len(payload['songs'])} sange, "
f"{len(payload['playlists'])} playlister, " f"{len(payload['playlists'])} playlister, "
f"{pl_count} server-playlister" f"{pl_count} server-playlister")
)
if on_done: if on_done:
on_done({"push": push_result, "pull": pull_result}) on_done({"push": push_result, "pull": pull_result})
except Exception as e: except Exception as e:
@@ -156,34 +128,36 @@ class SyncManager:
# ── Byg payload ─────────────────────────────────────────────────────────── # ── Byg payload ───────────────────────────────────────────────────────────
def _build_push_payload(self) -> dict: def _build_push_payload(self) -> dict:
conn = sqlite3.connect(self._db_path) conn = sqlite3.connect(self._db_path, timeout=10)
conn.row_factory = sqlite3.Row conn.row_factory = sqlite3.Row
conn.execute("PRAGMA journal_mode=WAL")
# Sange # Sange (dem der har filer — altså kendes lokalt)
songs = [] songs = []
for row in conn.execute( for row in conn.execute("""
"SELECT id, title, artist, album, bpm, duration_sec, file_format, mbid, acoustid " SELECT DISTINCT s.id, s.title, s.artist, s.album,
"FROM songs WHERE file_missing=0" s.bpm, s.duration_sec, s.mbid, s.acoustid, s.server_synced
).fetchall(): FROM songs s
JOIN files f ON f.song_id = s.id AND f.file_missing = 0
""").fetchall():
songs.append({ songs.append({
"local_id": str(row["id"]), "local_id": row["id"],
"title": row["title"] or "", "title": row["title"] or "",
"artist": row["artist"] or "", "artist": row["artist"] or "",
"album": row["album"] or "", "album": row["album"] or "",
"bpm": row["bpm"] or 0, "bpm": row["bpm"] or 0,
"duration_sec": row["duration_sec"] or 0, "duration_sec": row["duration_sec"] or 0,
"file_format": row["file_format"] or "",
"mbid": row["mbid"] or "", "mbid": row["mbid"] or "",
"acoustid": row["acoustid"] or "", "acoustid": row["acoustid"] or "",
}) })
# Danse # Danse
dances = [] dances = []
for row in conn.execute( for row in conn.execute("""
"SELECT d.name, dl.name as level_name, d.choreographer, " SELECT d.name, dl.name as level_name, d.choreographer,
"d.video_url, d.stepsheet_url, d.notes " d.video_url, d.stepsheet_url, d.notes
"FROM dances d LEFT JOIN dance_levels dl ON dl.id = d.level_id" FROM dances d LEFT JOIN dance_levels dl ON dl.id = d.level_id
).fetchall(): """).fetchall():
dances.append({ dances.append({
"name": row["name"] or "", "name": row["name"] or "",
"level_name": row["level_name"] or "", "level_name": row["level_name"] or "",
@@ -193,16 +167,17 @@ class SyncManager:
"notes": row["notes"] or "", "notes": row["notes"] or "",
}) })
# Dans-tags per sang # Dans-tags
song_dances = [] song_dances = []
for row in conn.execute(""" for row in conn.execute("""
SELECT sd.song_id, d.name as dance_name, dl.name as level_name, sd.dance_order SELECT sd.song_id, d.name as dance_name,
dl.name as level_name, sd.dance_order
FROM song_dances sd FROM song_dances sd
JOIN dances d ON d.id = sd.dance_id JOIN dances d ON d.id = sd.dance_id
LEFT JOIN dance_levels dl ON dl.id = d.level_id LEFT JOIN dance_levels dl ON dl.id = d.level_id
""").fetchall(): """).fetchall():
song_dances.append({ song_dances.append({
"song_local_id": str(row["song_id"]), "song_local_id": row["song_id"],
"dance_name": row["dance_name"], "dance_name": row["dance_name"],
"level_name": row["level_name"] or "", "level_name": row["level_name"] or "",
"dance_order": row["dance_order"], "dance_order": row["dance_order"],
@@ -211,36 +186,36 @@ class SyncManager:
# Alternativ-danse # Alternativ-danse
song_alts = [] song_alts = []
for row in conn.execute(""" for row in conn.execute("""
SELECT sad.song_id, d.name as dance_name, dl.name as level_name, sad.note SELECT sad.song_id, d.name as dance_name,
dl.name as level_name, sad.note
FROM song_alt_dances sad FROM song_alt_dances sad
JOIN dances d ON d.id = sad.dance_id JOIN dances d ON d.id = sad.dance_id
LEFT JOIN dance_levels dl ON dl.id = d.level_id LEFT JOIN dance_levels dl ON dl.id = d.level_id
""").fetchall(): """).fetchall():
song_alts.append({ song_alts.append({
"song_local_id": str(row["song_id"]), "song_local_id": row["song_id"],
"dance_name": row["dance_name"], "dance_name": row["dance_name"],
"level_name": row["level_name"] or "", "level_name": row["level_name"] or "",
"note": row["note"] or "", "note": row["note"] or "",
}) })
# Playlister — send alle (nye og eksisterende) til serveren. # Playlister — alle ikke-slettede
# Brug api_project_id som local_id hvis den kendes — så serveren
# kan matche på ID og ikke oprette duplikater.
playlists = [] playlists = []
for pl in conn.execute( for pl in conn.execute("""
"SELECT id, name, description, tags, api_project_id FROM playlists " SELECT id, name, description, tags, api_project_id
"WHERE name != '__aktiv__' AND is_deleted = 0" FROM playlists
).fetchall(): WHERE name != '__aktiv__' AND is_deleted = 0
""").fetchall():
pl_songs = [] pl_songs = []
for ps in conn.execute(""" for ps in conn.execute("""
SELECT s.id, s.title, s.artist, SELECT s.id as song_id, s.title, s.artist,
ps.position, ps.status, ps.is_workshop, ps.dance_override ps.position, ps.status, ps.is_workshop, ps.dance_override
FROM playlist_songs ps FROM playlist_songs ps
JOIN songs s ON s.id = ps.song_id JOIN songs s ON s.id = ps.song_id
WHERE ps.playlist_id=? ORDER BY ps.position WHERE ps.playlist_id=? ORDER BY ps.position
""", (pl["id"],)).fetchall(): """, (pl["id"],)).fetchall():
pl_songs.append({ pl_songs.append({
"song_local_id": str(ps["id"]), "song_local_id": ps["song_id"],
"song_title": ps["title"] or "", "song_title": ps["title"] or "",
"song_artist": ps["artist"] or "", "song_artist": ps["artist"] or "",
"position": int(ps["position"] or 1), "position": int(ps["position"] or 1),
@@ -248,9 +223,8 @@ class SyncManager:
"is_workshop": bool(ps["is_workshop"]), "is_workshop": bool(ps["is_workshop"]),
"dance_override": ps["dance_override"] or "", "dance_override": ps["dance_override"] or "",
}) })
# Brug api_project_id som local_id hvis den kendes — # Brug api_project_id som local_id hvis kendt
# serveren bruger dette til at finde eksisterende liste local_id = pl["api_project_id"] or pl["id"]
local_id = pl["api_project_id"] or str(pl["id"])
playlists.append({ playlists.append({
"local_id": local_id, "local_id": local_id,
"name": pl["name"], "name": pl["name"],
@@ -260,9 +234,7 @@ class SyncManager:
"songs": pl_songs, "songs": pl_songs,
}) })
# Slettede playlister — skal fjernes fra serveren. # Slettede playlister
# Serveren forventer en liste af strings (api_project_id).
# Kun playlister der faktisk er nået serveren (har api_project_id).
deleted = [ deleted = [
row["api_project_id"] row["api_project_id"]
for row in conn.execute( for row in conn.execute(
@@ -281,21 +253,82 @@ class SyncManager:
"deleted_playlists": deleted, "deleted_playlists": deleted,
} }
# ── Gem server-IDs ────────────────────────────────────────────────────────
def _save_server_ids(self, song_id_map: dict, playlist_id_map: dict):
"""
Gem server-IDs lokalt.
song_id_map: lokal_song_id → server_song_id
playlist_id_map: lokal_pl_id → server_pl_id
"""
if not song_id_map and not playlist_id_map:
return
conn = sqlite3.connect(self._db_path, timeout=10)
conn.execute("PRAGMA journal_mode=WAL")
# Sange: hvis server gav et andet ID end det lokale, opdater
for local_id, server_id in song_id_map.items():
if local_id != server_id:
# Tjek om server-ID allerede eksisterer
existing = conn.execute(
"SELECT id FROM songs WHERE id=?", (server_id,)
).fetchone()
if not existing:
# Opdater lokal sang til server-ID
conn.execute(
"UPDATE songs SET id=?, server_synced=1 WHERE id=?",
(server_id, local_id)
)
# Opdater referencer
conn.execute(
"UPDATE files SET song_id=? WHERE song_id=?",
(server_id, local_id)
)
conn.execute(
"UPDATE playlist_songs SET song_id=? WHERE song_id=?",
(server_id, local_id)
)
conn.execute(
"UPDATE song_dances SET song_id=? WHERE song_id=?",
(server_id, local_id)
)
conn.execute(
"UPDATE song_alt_dances SET song_id=? WHERE song_id=?",
(server_id, local_id)
)
else:
conn.execute(
"UPDATE songs SET server_synced=1 WHERE id=?", (local_id,)
)
# Playlister
for local_id, server_id in playlist_id_map.items():
conn.execute(
"UPDATE playlists SET api_project_id=? WHERE id=? OR api_project_id=?",
(server_id, local_id, local_id)
)
conn.commit()
conn.close()
# ── Anvend pull ─────────────────────────────────────────────────────────── # ── Anvend pull ───────────────────────────────────────────────────────────
def _apply_pull(self, data: dict): def _apply_pull(self, data: dict):
"""Gem server-data lokalt — opdaterer dans-info og importerer playlister.""" """Gem server-data lokalt."""
conn = sqlite3.connect(self._db_path) import uuid
conn = sqlite3.connect(self._db_path, timeout=10)
conn.row_factory = sqlite3.Row conn.row_factory = sqlite3.Row
conn.execute("PRAGMA journal_mode=WAL")
# Opdater dans-info fra server try:
# Opdater dans-info
for d in data.get("dances", []): for d in data.get("dances", []):
if not d.get("name"): if not d.get("name"):
continue continue
existing = conn.execute( existing = conn.execute(
"SELECT id FROM dances WHERE name=? COLLATE NOCASE", (d["name"],) "SELECT id FROM dances WHERE name=? COLLATE NOCASE", (d["name"],)
).fetchone() ).fetchone()
if existing and (d.get("choreographer") or d.get("video_url") or d.get("stepsheet_url")): if existing and (d.get("choreographer") or d.get("video_url")):
conn.execute(""" conn.execute("""
UPDATE dances SET UPDATE dances SET
choreographer = CASE WHEN choreographer='' THEN ? ELSE choreographer END, choreographer = CASE WHEN choreographer='' THEN ? ELSE choreographer END,
@@ -305,8 +338,7 @@ class SyncManager:
""", (d.get("choreographer",""), d.get("video_url",""), """, (d.get("choreographer",""), d.get("video_url",""),
d.get("stepsheet_url",""), existing["id"])) d.get("stepsheet_url",""), existing["id"]))
# Importer/opdater egne playlister fra server — server er sandhed # Hent soft-slettede server-IDs så vi springer dem over
# Hent server-IDs på soft-slettede playlister så vi springer dem over
deleted_server_ids = { deleted_server_ids = {
row["api_project_id"] row["api_project_id"]
for row in conn.execute( for row in conn.execute(
@@ -315,13 +347,12 @@ class SyncManager:
).fetchall() ).fetchall()
} }
# Importer egne playlister
for pl in data.get("my_playlists", []): for pl in data.get("my_playlists", []):
server_id = pl.get("server_id") server_id = pl.get("server_id")
name = pl.get("name", "") name = pl.get("name", "")
if not server_id or not name: if not server_id or not name:
continue continue
# Spring over hvis listen er soft-slettet lokalt
if server_id in deleted_server_ids: if server_id in deleted_server_ids:
continue continue
@@ -331,52 +362,59 @@ class SyncManager:
if existing: if existing:
pl_id = existing["id"] pl_id = existing["id"]
# Opdater navn hvis det er ændret på serveren
conn.execute( conn.execute(
"UPDATE playlists SET name=? WHERE id=?", (name, pl_id) "UPDATE playlists SET name=? WHERE id=?", (name, pl_id)
) )
else: else:
cur = conn.execute( pl_id = str(uuid.uuid4())
"INSERT INTO playlists (name, description, api_project_id, is_linked, server_permission) " conn.execute(
"VALUES (?,?,?,1,'edit')", "INSERT INTO playlists (id, name, description, api_project_id, is_linked, server_permission) "
(name, pl.get("description",""), server_id) "VALUES (?,?,?,?,1,'edit')",
(pl_id, name, pl.get("description",""), server_id)
) )
pl_id = cur.lastrowid
# Genindlæs sange fra serveren — server er sandhed # Genindlæs sange
conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,)) conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,))
position = 1 position = 1
for song_data in pl.get("songs", []): songs_from_server = pl.get("songs", [])
logger.info(f"Pull: liste '{name}' har {len(songs_from_server)} sange")
for song_data in songs_from_server:
server_song_id = song_data.get("song_id", "")
title = song_data.get("title", "") title = song_data.get("title", "")
artist = song_data.get("artist", "") artist = song_data.get("artist", "")
if not title: mbid = song_data.get("mbid", "")
acoustid = song_data.get("acoustid", "")
if not title and not server_song_id:
continue continue
local = conn.execute(
"SELECT id FROM songs WHERE title=? AND artist=? LIMIT 1", # Find eller opret sang lokalt
(title, artist) local_song_id = self._find_or_create_song_local(
).fetchone() conn, server_song_id, title, artist,
if not local: mbid=mbid, acoustid=acoustid,
import uuid bpm=song_data.get("bpm", 0),
new_id = str(uuid.uuid4()) duration_sec=song_data.get("duration_sec", 0),
conn.execute(
"INSERT OR IGNORE INTO songs (id, title, artist, file_missing) VALUES (?,?,?,1)",
(new_id, title, artist)
) )
local_id = new_id
else: # Find tilgængelig fil til denne sang
local_id = local["id"] file_row = conn.execute(
"SELECT id FROM files WHERE song_id=? AND file_missing=0 LIMIT 1",
(local_song_id,)
).fetchone()
file_id = file_row["id"] if file_row else None
conn.execute(""" conn.execute("""
INSERT OR IGNORE INTO playlist_songs INSERT INTO playlist_songs
(playlist_id, song_id, position, status, is_workshop, dance_override) (id, playlist_id, song_id, file_id, position, status, is_workshop, dance_override)
VALUES (?,?,?,?,?,?) VALUES (?,?,?,?,?,?,?,?)
""", (pl_id, local_id, position, """, (str(uuid.uuid4()), pl_id, local_song_id, file_id, position,
song_data.get("status","pending"), song_data.get("status","pending"),
1 if song_data.get("is_workshop") else 0, 1 if song_data.get("is_workshop") else 0,
song_data.get("dance_override","") or "")) song_data.get("dance_override","") or ""))
position += 1 position += 1
# Importer delte playlister (read-only — is_linked=1, server_permission='view') # Importer delte playlister
for pl in data.get("shared", []): for pl in data.get("shared", []):
server_id = pl.get("server_id") server_id = pl.get("server_id")
name = pl.get("name", "") name = pl.get("name", "")
@@ -389,46 +427,98 @@ class SyncManager:
).fetchone() ).fetchone()
if existing: if existing:
# Opdater sange fra server (ejer kan have ændret listen)
pl_id = existing["id"] pl_id = existing["id"]
conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,)) conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,))
else: else:
cur = conn.execute( pl_id = str(uuid.uuid4())
"INSERT INTO playlists (name, description, api_project_id, is_linked, server_permission) " conn.execute(
"VALUES (?,?,?,1,'view')", "INSERT INTO playlists (id, name, description, api_project_id, is_linked, server_permission) "
(f"{name} ({owner})", "", server_id) "VALUES (?,?,?,?,1,'view')",
(pl_id, f"{name} ({owner})", "", server_id)
) )
pl_id = cur.lastrowid
position = 1 position = 1
for song_data in pl.get("songs", []): for song_data in pl.get("songs", []):
server_song_id = song_data.get("song_id", "")
title = song_data.get("title", "") title = song_data.get("title", "")
artist = song_data.get("artist", "") artist = song_data.get("artist", "")
if not title: if not title and not server_song_id:
continue continue
local = conn.execute(
"SELECT id FROM songs WHERE title=? AND artist=? LIMIT 1", local_song_id = self._find_or_create_song_local(
(title, artist) conn, server_song_id, title, artist,
).fetchone() mbid=song_data.get("mbid", ""),
if not local: acoustid=song_data.get("acoustid", ""),
import uuid
new_id = str(uuid.uuid4())
conn.execute(
"INSERT OR IGNORE INTO songs (id, title, artist, file_missing) VALUES (?,?,?,1)",
(new_id, title, artist)
) )
local_id = new_id file_row = conn.execute(
else: "SELECT id FROM files WHERE song_id=? AND file_missing=0 LIMIT 1",
local_id = local["id"] (local_song_id,)
).fetchone()
file_id = file_row["id"] if file_row else None
conn.execute(""" conn.execute("""
INSERT OR IGNORE INTO playlist_songs INSERT INTO playlist_songs
(playlist_id, song_id, position, status, is_workshop, dance_override) (id, playlist_id, song_id, file_id, position, status, is_workshop, dance_override)
VALUES (?,?,?,?,?,?) VALUES (?,?,?,?,?,?,?,?)
""", (pl_id, local_id, position, """, (str(uuid.uuid4()), pl_id, local_song_id, file_id, position,
song_data.get("status","pending"), song_data.get("status","pending"),
1 if song_data.get("is_workshop") else 0, 1 if song_data.get("is_workshop") else 0,
song_data.get("dance_override","") or "")) song_data.get("dance_override","") or ""))
position += 1 position += 1
conn.commit() conn.commit()
except Exception:
conn.rollback()
raise
finally:
conn.close() conn.close()
def _find_or_create_song_local(self, conn, server_song_id: str, title: str,
artist: str = "", mbid: str = "",
acoustid: str = "", bpm: int = 0,
duration_sec: int = 0) -> str:
"""Find eller opret sang lokalt. Returnerer lokal song_id."""
import uuid
# Match på server-ID
if server_song_id:
row = conn.execute(
"SELECT id FROM songs WHERE id=?", (server_song_id,)
).fetchone()
if row:
return row["id"]
# Match på MBID
if mbid:
row = conn.execute(
"SELECT id FROM songs WHERE mbid=?", (mbid,)
).fetchone()
if row:
return row["id"]
# Match på AcoustID
if acoustid:
row = conn.execute(
"SELECT id FROM songs WHERE acoustid=?", (acoustid,)
).fetchone()
if row:
return row["id"]
# Match på titel + artist
if title:
row = conn.execute(
"SELECT id FROM songs WHERE title=? AND artist=?", (title, artist)
).fetchone()
if row:
return row["id"]
# Opret ny — brug server-ID hvis tilgængeligt
new_id = server_song_id or str(uuid.uuid4())
conn.execute(
"INSERT INTO songs (id, title, artist, bpm, duration_sec, mbid, acoustid, server_synced) "
"VALUES (?,?,?,?,?,?,?,1)",
(new_id, title, artist, bpm, duration_sec, mbid or None, acoustid or None)
)
logger.info(f"Pull: oprettet sang '{title}' ({new_id})")
return new_id

View File

@@ -406,9 +406,13 @@ class PlaylistPanel(QWidget):
return False return False
# Hent sange med status, workshop og dans-override # Hent sange med status, workshop og dans-override
# JOIN songs — sangen er altid i songs tabellen (oprettet ved pull med file_missing=1)
# file_missing betyder bare at filen ikke er på denne maskine
songs_raw = conn.execute(""" songs_raw = conn.execute("""
SELECT s.*, ps.position, ps.status, SELECT s.id, s.title, s.artist, s.album,
ps.is_workshop, ps.dance_override s.bpm, s.duration_sec, s.file_format,
s.local_path, s.file_missing,
ps.position, ps.status, ps.is_workshop, ps.dance_override
FROM playlist_songs ps FROM playlist_songs ps
JOIN songs s ON s.id = ps.song_id JOIN songs s ON s.id = ps.song_id
WHERE ps.playlist_id=? ORDER BY ps.position WHERE ps.playlist_id=? ORDER BY ps.position
@@ -426,10 +430,10 @@ class PlaylistPanel(QWidget):
override = row["dance_override"] or "" override = row["dance_override"] or ""
active_dance = override if override else (dance_names[0] if dance_names else "") active_dance = override if override else (dance_names[0] if dance_names else "")
local_path = row["local_path"] local_path = row["local_path"] or ""
file_missing = bool(row["file_missing"]) file_missing = bool(row["file_missing"])
# Forsøg at finde sangen lokalt hvis den mangler # Forsøg at finde filen lokalt hvis den mangler på denne maskine
if file_missing or not local_path: if file_missing or not local_path:
match = conn.execute(""" match = conn.execute("""
SELECT local_path FROM songs SELECT local_path FROM songs
@@ -444,11 +448,11 @@ class PlaylistPanel(QWidget):
"id": row["id"], "id": row["id"],
"title": row["title"], "title": row["title"],
"artist": row["artist"], "artist": row["artist"],
"album": row["album"], "album": row["album"] or "",
"bpm": row["bpm"], "bpm": row["bpm"] or 0,
"duration_sec": row["duration_sec"], "duration_sec": row["duration_sec"] or 0,
"local_path": local_path, "local_path": local_path,
"file_format": row["file_format"], "file_format": row["file_format"] or "",
"file_missing": file_missing, "file_missing": file_missing,
"dances": dance_names, "dances": dance_names,
"active_dance": active_dance, "active_dance": active_dance,
@@ -605,9 +609,12 @@ class PlaylistPanel(QWidget):
else: else:
self._can_edit_server = False self._can_edit_server = False
with get_db() as conn: with get_db() as conn:
# JOIN songs — sangen er altid i songs tabellen (oprettet ved pull med file_missing=1)
songs_raw = conn.execute(""" songs_raw = conn.execute("""
SELECT s.*, ps.position, ps.status, SELECT s.id, s.title, s.artist, s.album,
ps.is_workshop, ps.dance_override s.bpm, s.duration_sec, s.file_format,
s.local_path, s.file_missing,
ps.position, ps.status, ps.is_workshop, ps.dance_override
FROM playlist_songs ps FROM playlist_songs ps
JOIN songs s ON s.id = ps.song_id JOIN songs s ON s.id = ps.song_id
WHERE ps.playlist_id=? ORDER BY ps.position WHERE ps.playlist_id=? ORDER BY ps.position
@@ -618,17 +625,17 @@ class PlaylistPanel(QWidget):
for row in songs_raw: for row in songs_raw:
dances = conn.execute(""" dances = conn.execute("""
SELECT d.name FROM song_dances sd SELECT d.name FROM song_dances sd
JOIN dances d ON d.id = sd.dance_id JOIN dances d ON d.id = sd.dance_id
WHERE sd.song_id=? ORDER BY sd.dance_order WHERE sd.song_id=? ORDER BY sd.dance_order
""", (row["id"],)).fetchall() """, (row["id"],)).fetchall()
dance_names = [d["name"] for d in dances] dance_names = [d["name"] for d in dances]
override = row["dance_override"] or "" override = row["dance_override"] or ""
active_dance = override if override else (dance_names[0] if dance_names else "") active_dance = override if override else (dance_names[0] if dance_names else "")
local_path = row["local_path"] local_path = row["local_path"] or ""
file_missing = bool(row["file_missing"]) file_missing = bool(row["file_missing"])
# Forsøg at finde sangen lokalt hvis den mangler # Forsøg at finde filen lokalt hvis den mangler på denne maskine
if file_missing or not local_path: if file_missing or not local_path:
match = conn.execute(""" match = conn.execute("""
SELECT local_path FROM songs SELECT local_path FROM songs
@@ -644,11 +651,11 @@ class PlaylistPanel(QWidget):
"id": row["id"], "id": row["id"],
"title": row["title"], "title": row["title"],
"artist": row["artist"], "artist": row["artist"],
"album": row["album"], "album": row["album"] or "",
"bpm": row["bpm"], "bpm": row["bpm"] or 0,
"duration_sec": row["duration_sec"], "duration_sec": row["duration_sec"] or 0,
"local_path": local_path, "local_path": local_path,
"file_format": row["file_format"], "file_format": row["file_format"] or "",
"file_missing": file_missing, "file_missing": file_missing,
"dances": dance_names, "dances": dance_names,
"active_dance": active_dance, "active_dance": active_dance,