From efc30cdbb266829855c8637988d3a7860744c562 Mon Sep 17 00:00:00 2001 From: Carsten Kvist Date: Sun, 19 Apr 2026 23:45:59 +0200 Subject: [PATCH] NY db struktur --- linedance-api/app/models/__init__.py | 76 +- linedance-api/app/routers/sync.py | 316 +++---- linedance-app/local/local_db.py | 1192 ++++++++++---------------- linedance-app/local/scanner.py | 221 ++--- linedance-app/local/sync_manager.py | 598 +++++++------ linedance-app/ui/playlist_panel.py | 43 +- 6 files changed, 1056 insertions(+), 1390 deletions(-) diff --git a/linedance-api/app/models/__init__.py b/linedance-api/app/models/__init__.py index ed17f9ad..8c6fc8bb 100644 --- a/linedance-api/app/models/__init__.py +++ b/linedance-api/app/models/__init__.py @@ -26,34 +26,30 @@ class User(Base): verify_token: Mapped[str|None] = mapped_column(String(64), nullable=True) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) - projects: Mapped[list["Project"]] = relationship("Project", back_populates="owner") - memberships: Mapped[list["ProjectMember"]] = relationship("ProjectMember", back_populates="user") - songs: Mapped[list["Song"]] = relationship("Song", back_populates="owner") - alt_ratings: Mapped[list["DanceAltRating"]] = relationship("DanceAltRating", back_populates="user") - playlist_shares: Mapped[list["PlaylistShare"]] = relationship("PlaylistShare", foreign_keys="PlaylistShare.shared_with_id", back_populates="shared_with") + projects: Mapped[list["Project"]] = relationship("Project", back_populates="owner") + memberships: Mapped[list["ProjectMember"]] = relationship("ProjectMember", back_populates="user") + alt_ratings: Mapped[list["DanceAltRating"]] = relationship("DanceAltRating", back_populates="user") + playlist_shares: Mapped[list["PlaylistShare"]] = relationship("PlaylistShare", foreign_keys="PlaylistShare.shared_with_id", back_populates="shared_with") -# ── Song ────────────────────────────────────────────────────────────────────── +# ── Song (global — ikke knyttet til en 
bruger) ──────────────────────────────── class Song(Base): __tablename__ = "songs" id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) - owner_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False) title: Mapped[str] = mapped_column(String(255), nullable=False) artist: Mapped[str] = mapped_column(String(255), default="") album: Mapped[str] = mapped_column(String(255), default="") bpm: Mapped[int] = mapped_column(Integer, default=0) duration_sec: Mapped[int] = mapped_column(Integer, default=0) - file_format: Mapped[str] = mapped_column(String(8), default="") - mbid: Mapped[str|None] = mapped_column(String(36), nullable=True) + mbid: Mapped[str|None] = mapped_column(String(36), nullable=True, unique=True) acoustid: Mapped[str|None] = mapped_column(String(64), nullable=True) synced_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) - owner: Mapped["User"] = relationship("User", back_populates="songs") - project_songs: Mapped[list["ProjectSong"]] = relationship("ProjectSong", back_populates="song") - song_dances: Mapped[list["SongDance"]] = relationship("SongDance", back_populates="song", cascade="all, delete-orphan") - song_alt_dances: Mapped[list["SongAltDance"]] = relationship("SongAltDance", back_populates="song", cascade="all, delete-orphan") + project_songs: Mapped[list["ProjectSong"]] = relationship("ProjectSong", back_populates="song") + song_dances: Mapped[list["SongDance"]] = relationship("SongDance", back_populates="song", cascade="all, delete-orphan") + song_alt_dances: Mapped[list["SongAltDance"]] = relationship("SongAltDance", back_populates="song", cascade="all, delete-orphan") # ── Dans-entitet ────────────────────────────────────────────────────────────── @@ -68,7 +64,6 @@ class DanceLevel(Base): class Dance(Base): - """Dans-entitet: navn + niveau er unik kombination.""" __tablename__ = "dances" __table_args__ = (UniqueConstraint("name", "level_id", name="uq_dance_name_level"),) @@ 
-80,7 +75,6 @@ class Dance(Base): stepsheet_url: Mapped[str] = mapped_column(String(512), default="") notes: Mapped[str] = mapped_column(Text, default="") use_count: Mapped[int] = mapped_column(Integer, default=1) - source: Mapped[str] = mapped_column(String(16), default="local") synced_at: Mapped[datetime|None] = mapped_column(DateTime, nullable=True) level: Mapped["DanceLevel|None"] = relationship("DanceLevel") @@ -95,7 +89,7 @@ class Project(Base): owner_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False) name: Mapped[str] = mapped_column(String(128), nullable=False) description: Mapped[str] = mapped_column(Text, default="") - visibility: Mapped[str] = mapped_column(String(16), default="private") # private|shared|public + visibility: Mapped[str] = mapped_column(String(16), default="private") updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, onupdate=now_utc) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) @@ -111,8 +105,8 @@ class ProjectMember(Base): id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False) user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False) - role: Mapped[str] = mapped_column(String(16), default="viewer") # owner|editor|viewer - status: Mapped[str] = mapped_column(String(16), default="pending") # pending|accepted + role: Mapped[str] = mapped_column(String(16), default="viewer") + status: Mapped[str] = mapped_column(String(16), default="pending") invited_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) project: Mapped["Project"] = relationship("Project", back_populates="members") @@ -122,28 +116,27 @@ class ProjectMember(Base): class ProjectSong(Base): __tablename__ = "project_songs" - id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) - project_id: Mapped[str] = 
mapped_column(String(36), ForeignKey("projects.id"), nullable=False) - song_id: Mapped[str] = mapped_column(String(36), ForeignKey("songs.id"), nullable=False) - position: Mapped[int] = mapped_column(Integer, nullable=False) - status: Mapped[str] = mapped_column(String(16), default="pending") - is_workshop: Mapped[bool] = mapped_column(Boolean, default=False) - dance_override: Mapped[str] = mapped_column(String(128), default="") + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) + project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False) + song_id: Mapped[str] = mapped_column(String(36), ForeignKey("songs.id"), nullable=False) + position: Mapped[int] = mapped_column(Integer, nullable=False) + status: Mapped[str] = mapped_column(String(16), default="pending") + is_workshop: Mapped[bool] = mapped_column(Boolean, default=False) + dance_override: Mapped[str] = mapped_column(String(128), default="") project: Mapped["Project"] = relationship("Project", back_populates="project_songs") song: Mapped["Song"] = relationship("Song", back_populates="project_songs") class PlaylistShare(Base): - """Deling af en playlist med specifikke brugere.""" __tablename__ = "playlist_shares" __table_args__ = (UniqueConstraint("project_id", "shared_with_id", name="uq_share"),) id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) project_id: Mapped[str] = mapped_column(String(36), ForeignKey("projects.id"), nullable=False) shared_with_id: Mapped[str|None] = mapped_column(String(36), ForeignKey("users.id"), nullable=True) - invited_email: Mapped[str] = mapped_column(String(255), default="") # til ikke-registrerede - permission: Mapped[str] = mapped_column(String(16), default="view") # view|copy|edit + invited_email: Mapped[str] = mapped_column(String(255), default="") + permission: Mapped[str] = mapped_column(String(16), default="view") accepted_at: Mapped[datetime|None] = mapped_column(DateTime, 
nullable=True) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) @@ -154,28 +147,26 @@ class PlaylistShare(Base): # ── Sang-dans tags ──────────────────────────────────────────────────────────── class SongDance(Base): - """Dans-tags på en sang (brugerens egne tags).""" __tablename__ = "song_dances" __table_args__ = (UniqueConstraint("song_id", "dance_id", name="uq_song_dance"),) - id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) - song_id: Mapped[str] = mapped_column(String(36), ForeignKey("songs.id"), nullable=False) - dance_id: Mapped[int] = mapped_column(Integer, ForeignKey("dances.id"), nullable=False) - dance_order: Mapped[int] = mapped_column(Integer, default=1) + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) + song_id: Mapped[str] = mapped_column(String(36), ForeignKey("songs.id"), nullable=False) + dance_id: Mapped[int] = mapped_column(Integer, ForeignKey("dances.id"), nullable=False) + dance_order: Mapped[int] = mapped_column(Integer, default=1) song: Mapped["Song"] = relationship("Song", back_populates="song_dances") dance: Mapped["Dance"] = relationship("Dance") class SongAltDance(Base): - """Alternativ-dans tags på en sang.""" __tablename__ = "song_alt_dances" __table_args__ = (UniqueConstraint("song_id", "dance_id", name="uq_song_alt_dance"),) - id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) - song_id: Mapped[str] = mapped_column(String(36), ForeignKey("songs.id"), nullable=False) - dance_id: Mapped[int] = mapped_column(Integer, ForeignKey("dances.id"), nullable=False) - note: Mapped[str] = mapped_column(String(255), default="") + id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) + song_id: Mapped[str] = mapped_column(String(36), ForeignKey("songs.id"), nullable=False) + dance_id: Mapped[int] = mapped_column(Integer, ForeignKey("dances.id"), nullable=False) + note: Mapped[str] = mapped_column(String(255), 
default="") song: Mapped["Song"] = relationship("Song", back_populates="song_alt_dances") dance: Mapped["Dance"] = relationship("Dance") @@ -184,7 +175,6 @@ class SongAltDance(Base): # ── Community dans-tags ─────────────────────────────────────────────────────── class CommunityDance(Base): - """Fællesskabets dans-tags på sange.""" __tablename__ = "community_dances" __table_args__ = (UniqueConstraint("song_mbid", "song_title", "song_artist", "dance_id", name="uq_comm_dance"),) @@ -200,7 +190,6 @@ class CommunityDance(Base): class CommunityDanceAlt(Base): - """Fællesskabets alternativ-danse til en sang med ratings.""" __tablename__ = "community_dance_alts" __table_args__ = (UniqueConstraint("song_mbid", "song_title", "song_artist", "alt_dance_id", name="uq_comm_alt"),) @@ -215,20 +204,19 @@ class CommunityDanceAlt(Base): rating_count: Mapped[int] = mapped_column(Integer, default=0) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) - alt_dance: Mapped["Dance"] = relationship("Dance") + alt_dance: Mapped["Dance"] = relationship("Dance") ratings: Mapped[list["DanceAltRating"]] = relationship("DanceAltRating", back_populates="alternative", cascade="all, delete-orphan") class DanceAltRating(Base): - """1-5 stjerne rating af en alternativ-dans.""" __tablename__ = "dance_alt_ratings" __table_args__ = (UniqueConstraint("alternative_id", "user_id", name="uq_rating"),) id: Mapped[str] = mapped_column(String(36), primary_key=True, default=new_uuid) alternative_id: Mapped[str] = mapped_column(String(36), ForeignKey("community_dance_alts.id"), nullable=False) user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id"), nullable=False) - score: Mapped[int] = mapped_column(Integer, nullable=False) # 1-5 + score: Mapped[int] = mapped_column(Integer, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc) alternative: Mapped["CommunityDanceAlt"] = relationship("CommunityDanceAlt", back_populates="ratings") - user: 
Mapped["User"] = relationship("User", back_populates="alt_ratings") + user: Mapped["User"] = relationship("User", back_populates="alt_ratings") \ No newline at end of file diff --git a/linedance-api/app/routers/sync.py b/linedance-api/app/routers/sync.py index 50928393..e3ebe6bf 100644 --- a/linedance-api/app/routers/sync.py +++ b/linedance-api/app/routers/sync.py @@ -4,20 +4,22 @@ sync.py — Push/pull synkronisering mellem lokal app og server. POST /sync/push — send lokal data op til server GET /sync/pull — hent server-data ned til app """ +import uuid +import logging from datetime import datetime, timezone -from fastapi import APIRouter, Depends, HTTPException +from fastapi import APIRouter, Depends from sqlalchemy.orm import Session from pydantic import BaseModel -from typing import Optional from app.core.database import get_db from app.core.security import get_current_user from app.models import ( User, Song, Dance, DanceLevel, Project, ProjectSong, - PlaylistShare, CommunityDance, CommunityDanceAlt, + PlaylistShare, CommunityDance, SongDance, SongAltDance, ) router = APIRouter(prefix="/sync", tags=["sync"]) +logger = logging.getLogger(__name__) # ── Schemas ─────────────────────────────────────────────────────────────────── @@ -29,7 +31,6 @@ class SongData(BaseModel): album: str = "" bpm: int = 0 duration_sec: int = 0 - file_format: str = "" mbid: str = "" acoustid: str = "" @@ -42,16 +43,16 @@ class DanceData(BaseModel): notes: str = "" class SongDanceData(BaseModel): - song_local_id: str - dance_name: str - level_name: str = "" - dance_order: int = 1 + song_local_id: str + dance_name: str + level_name: str = "" + dance_order: int = 1 class SongAltDanceData(BaseModel): - song_local_id: str - dance_name: str - level_name: str = "" - note: str = "" + song_local_id: str + dance_name: str + level_name: str = "" + note: str = "" class PlaylistSongData(BaseModel): song_local_id: str @@ -71,12 +72,65 @@ class PlaylistData(BaseModel): songs: list[PlaylistSongData] = 
[] class PushPayload(BaseModel): - songs: list[SongData] = [] - dances: list[DanceData] = [] - song_dances: list[SongDanceData] = [] - song_alts: list[SongAltDanceData] = [] - playlists: list[PlaylistData] = [] - deleted_playlists: list[str] = [] # server-IDs (api_project_id) på slettede playlister + songs: list[SongData] = [] + dances: list[DanceData] = [] + song_dances: list[SongDanceData] = [] + song_alts: list[SongAltDanceData] = [] + playlists: list[PlaylistData] = [] + deleted_playlists: list[str] = [] # server-IDs (Project.id) + + +# ── Hjælpefunktion: find eller opret sang globalt ───────────────────────────── + +def _find_or_create_song(db: Session, title: str, artist: str = "", + mbid: str = "", acoustid: str = "", + album: str = "", bpm: int = 0, + duration_sec: int = 0) -> Song: + """ + Match-hierarki: + 1. MBID — sikreste + 2. AcoustID + 3. Titel + artist + 4. Opret ny + """ + if mbid: + song = db.query(Song).filter_by(mbid=mbid).first() + if song: + return song + + if acoustid: + song = db.query(Song).filter_by(acoustid=acoustid).first() + if song: + # Tilføj mbid hvis den mangler + if mbid and not song.mbid: + song.mbid = mbid + return song + + if title: + song = db.query(Song).filter( + Song.title == title, + Song.artist == artist, + ).first() + if song: + # Opdater med bedre data hvis tilgængeligt + if mbid and not song.mbid: + song.mbid = mbid + if acoustid and not song.acoustid: + song.acoustid = acoustid + if bpm and not song.bpm: + song.bpm = bpm + return song + + # Opret ny global sang + song = Song( + title=title, artist=artist, album=album, + bpm=bpm, duration_sec=duration_sec, + mbid=mbid or None, + acoustid=acoustid or None, + ) + db.add(song) + db.flush() + return song # ── Push ────────────────────────────────────────────────────────────────────── @@ -88,78 +142,50 @@ def push( me: User = Depends(get_current_user), ): """Upload lokal data til server. 
Returnerer server-IDs.""" - song_id_map = {} # local_id → server Song.id - dance_id_map = {} # "name|level" → server Dance.id - level_map = {} # level_name → DanceLevel.id + import sqlalchemy as _sa + + song_id_map = {} # local_id → server Song.id + dance_id_map = {} # "name|level_id" → Dance.id + level_map = {} # level_name.lower() → DanceLevel.id # ── Dans-niveauer ───────────────────────────────────────────────────────── for lvl in db.query(DanceLevel).all(): level_map[lvl.name.lower()] = lvl.id - # ── Sange ───────────────────────────────────────────────────────────────── + # ── Sange (globale) ─────────────────────────────────────────────────────── for s in payload.songs: if not s.title: continue - # Match 1: MBID — sikrest - existing = None - if s.mbid: - existing = db.query(Song).filter_by(mbid=s.mbid).first() - # Match 2: titel+artist globalt - if not existing: - existing = db.query(Song).filter( - Song.title == s.title, - Song.artist == s.artist, - ).first() - if existing: - song_id_map[s.local_id] = existing.id - # Opdater BPM og MBID hvis de mangler - if s.bpm and not existing.bpm: - existing.bpm = s.bpm - if s.mbid and not existing.mbid: - existing.mbid = s.mbid - if s.acoustid and not existing.acoustid: - existing.acoustid = s.acoustid - else: - song = Song( - owner_id=me.id, - title=s.title, artist=s.artist, album=s.album, - bpm=s.bpm, duration_sec=s.duration_sec, - file_format=s.file_format, - mbid=s.mbid or None, - acoustid=s.acoustid or None, - ) - db.add(song) - db.flush() - song_id_map[s.local_id] = song.id + song = _find_or_create_song( + db, s.title, s.artist, + mbid=s.mbid, acoustid=s.acoustid, + album=s.album, bpm=s.bpm, duration_sec=s.duration_sec, + ) + song_id_map[s.local_id] = song.id - # ── Danse ────────────────────────────────────────────────────────────────── + # ── Danse ───────────────────────────────────────────────────────────────── for d in payload.dances: level_id = level_map.get(d.level_name.lower()) if d.level_name else None 
key = f"{d.name.lower()}|{level_id}" existing = db.query(Dance).filter_by(name=d.name, level_id=level_id).first() if existing: - # Opdater info hvis den har ny data - if d.choreographer: existing.choreographer = d.choreographer - if d.video_url: existing.video_url = d.video_url - if d.stepsheet_url: existing.stepsheet_url = d.stepsheet_url - if d.notes: existing.notes = d.notes + if d.choreographer: existing.choreographer = d.choreographer + if d.video_url: existing.video_url = d.video_url + if d.stepsheet_url: existing.stepsheet_url = d.stepsheet_url dance_id_map[key] = existing.id else: dance = Dance( name=d.name, level_id=level_id, - choreographer=d.choreographer, video_url=d.video_url, - stepsheet_url=d.stepsheet_url, notes=d.notes, + choreographer=d.choreographer, + video_url=d.video_url, + stepsheet_url=d.stepsheet_url, + notes=d.notes, ) db.add(dance) db.flush() dance_id_map[key] = dance.id - # ── Sang-dans tags (brugerens egne) ─────────────────────────────────────── - from app.models import SongDance, SongAltDance # ── Sang-dans tags ──────────────────────────────────────────────────────── - from app.models import SongDance, SongAltDance - import sqlalchemy as _sa - for sd in payload.song_dances: song_id = song_id_map.get(sd.song_local_id) if not song_id: @@ -172,12 +198,8 @@ def push( db.execute(_sa.text( "INSERT IGNORE INTO song_dances (id, song_id, dance_id, dance_order) " "VALUES (:id, :song_id, :dance_id, :dance_order)" - ), { - "id": str(__import__("uuid").uuid4()), - "song_id": song_id, - "dance_id": dance_id, - "dance_order": sd.dance_order, - }) + ), {"id": str(uuid.uuid4()), "song_id": song_id, + "dance_id": dance_id, "dance_order": sd.dance_order}) for sa in payload.song_alts: song_id = song_id_map.get(sa.song_local_id) @@ -191,35 +213,27 @@ def push( db.execute(_sa.text( "INSERT IGNORE INTO song_alt_dances (id, song_id, dance_id, note) " "VALUES (:id, :song_id, :dance_id, :note)" - ), { - "id": str(__import__("uuid").uuid4()), - "song_id": 
song_id, - "dance_id": dance_id, - "note": sa.note or "", - }) + ), {"id": str(uuid.uuid4()), "song_id": song_id, + "dance_id": dance_id, "note": sa.note or ""}) # ── Playlister ──────────────────────────────────────────────────────────── - # VIGTIGT: Match altid på local_id (= api_project_id på klienten), - # aldrig på navn — navn er ikke unikt og giver duplikater. playlist_id_map = {} for pl in payload.playlists: - # Prøv først at finde via server-ID (local_id er klientens lokale db-id - # som tidligere er returneret som server-ID via playlist_id_map) + # Find eksisterende via server-ID (local_id er api_project_id på klienten) existing = None if pl.local_id: existing = db.query(Project).filter_by( id=pl.local_id, owner_id=me.id ).first() - # Fallback: navn — kun hvis vi aldrig har set denne liste før if not existing: existing = db.query(Project).filter_by( owner_id=me.id, name=pl.name ).first() + if existing: existing.name = pl.name existing.description = pl.description existing.visibility = pl.visibility - # Opdater kun sange hvis push faktisk har sange med if pl.songs: db.query(ProjectSong).filter_by(project_id=existing.id).delete() project = existing @@ -233,27 +247,21 @@ def push( playlist_id_map[pl.local_id] = project.id for ps in pl.songs: - # Prøv først via song_id_map (lokal ID) + # Find sang via song_id_map eller titel+artist song_id = song_id_map.get(ps.song_local_id) - # Fallback: match på titel+artist if not song_id and ps.song_title: - existing_song = db.query(Song).filter_by( - title=ps.song_title, artist=ps.song_artist - ).first() - if existing_song: - song_id = existing_song.id + song = _find_or_create_song(db, ps.song_title, ps.song_artist) + song_id = song.id if not song_id: continue - proj_song = ProjectSong( + db.add(ProjectSong( project_id=project.id, song_id=song_id, position=ps.position, status=ps.status, is_workshop=ps.is_workshop, dance_override=ps.dance_override, - ) - db.add(proj_song) + )) - # ── Slet playlister der er fjernet lokalt 
───────────────────────────────── - # Klienten sender api_project_id (= server Project.id) som strings + # ── Slet playlister ─────────────────────────────────────────────────────── for project_id in payload.deleted_playlists: proj = db.query(Project).filter_by(id=project_id, owner_id=me.id).first() if proj: @@ -266,6 +274,7 @@ def push( "status": "ok", "songs_synced": len(song_id_map), "playlists_synced": len(playlist_id_map), + "song_id_map": {k: str(v) for k, v in song_id_map.items()}, "playlist_id_map": {k: str(v) for k, v in playlist_id_map.items()}, } @@ -285,99 +294,93 @@ def pull( for l in db.query(DanceLevel).order_by(DanceLevel.sort_order).all() ] - # Danse med info + # Danse dances = [ { - "name": d.name, - "level_id": d.level_id, + "name": d.name, + "level_id": d.level_id, "choreographer": d.choreographer, - "video_url": d.video_url, + "video_url": d.video_url, "stepsheet_url": d.stepsheet_url, - "notes": d.notes, - "use_count": d.use_count, + "notes": d.notes, + "use_count": d.use_count, } for d in db.query(Dance).order_by(Dance.use_count.desc()).limit(500).all() ] - # Community dans-tags (populære) - community = [] - for cd in db.query(CommunityDance).limit(1000).all(): - community.append({ - "song_title": cd.song_title, - "song_artist": cd.song_artist, - "dance_id": cd.dance_id, - }) - - # Delte playlister (read-only — kun ejeren kan redigere) - shared_ids = set() - for s in db.query(PlaylistShare).filter( - (PlaylistShare.shared_with_id == me.id) | - (PlaylistShare.invited_email == me.email) - ).all(): - shared_ids.add(s.project_id) + # Delte playlister + shared_ids = { + s.project_id for s in db.query(PlaylistShare).filter( + (PlaylistShare.shared_with_id == me.id) | + (PlaylistShare.invited_email == me.email) + ).all() + } shared = [] for p in db.query(Project).filter(Project.id.in_(shared_ids)).all(): if p.owner_id == me.id: - continue # Egne lister håndteres separat + continue owner = db.query(User).filter_by(id=p.owner_id).first() - songs_out = 
[] - for ps in p.project_songs: - song = db.query(Song).filter_by(id=ps.song_id).first() - if not song: - continue - songs_out.append({ - "title": song.title, - "artist": song.artist, - "position": ps.position, - "status": ps.status, - "is_workshop": ps.is_workshop, - "dance_override": ps.dance_override or "", - }) shared.append({ "server_id": p.id, "name": p.name, "owner": owner.username if owner else "?", - "songs": sorted(songs_out, key=lambda x: x["position"]), + "songs": [ + { + "song_id": str(ps.song_id), + "title": ps.song.title, + "artist": ps.song.artist, + "mbid": ps.song.mbid or "", + "acoustid": ps.song.acoustid or "", + "bpm": ps.song.bpm, + "duration_sec": ps.song.duration_sec, + "position": ps.position, + "status": ps.status, + "is_workshop": ps.is_workshop, + "dance_override": ps.dance_override or "", + } + for ps in sorted(p.project_songs, key=lambda x: x.position) + if ps.song + ], }) # Egne playlister my_playlists = [] - all_projects = db.query(Project).filter_by(owner_id=me.id).all() - import logging - logging.getLogger(__name__).info(f"Pull: fandt {len(all_projects)} projekter for {me.id}") - for p in all_projects: - songs_out = [] - for ps in p.project_songs: - song = db.query(Song).filter_by(id=ps.song_id).first() - if not song: - continue - songs_out.append({ - "title": song.title, - "artist": song.artist, - "position": ps.position, - "status": ps.status, - "is_workshop": ps.is_workshop, - "dance_override": ps.dance_override or "", - }) + for p in db.query(Project).filter_by(owner_id=me.id).all(): my_playlists.append({ "server_id": p.id, "name": p.name, "description": p.description or "", - "songs": sorted(songs_out, key=lambda x: x["position"]), + "songs": [ + { + "song_id": str(ps.song_id), + "title": ps.song.title, + "artist": ps.song.artist, + "mbid": ps.song.mbid or "", + "acoustid": ps.song.acoustid or "", + "bpm": ps.song.bpm, + "duration_sec": ps.song.duration_sec, + "position": ps.position, + "status": ps.status, + "is_workshop": 
ps.is_workshop, + "dance_override": ps.dance_override or "", + } + for ps in sorted(p.project_songs, key=lambda x: x.position) + if ps.song + ], }) - # Brugerens egne dans-tags - from app.models import SongDance, SongAltDance + logger.info(f"Pull: {len(my_playlists)} playlister for {me.username}") + + # Dans-tags (brugerens egne) song_tags = [] - for sd in db.query(SongDance).join(Song).filter(Song.owner_id == me.id).all(): + for sd in db.query(SongDance).all(): dance = db.query(Dance).filter_by(id=sd.dance_id).first() if not dance: continue level = db.query(DanceLevel).filter_by(id=dance.level_id).first() if dance.level_id else None song_tags.append({ - "song_title": sd.song.title, - "song_artist": sd.song.artist, + "song_id": sd.song_id, "dance_name": dance.name, "level_name": level.name if level else "", "dance_order": sd.dance_order, @@ -386,7 +389,6 @@ def pull( return { "levels": levels, "dances": dances, - "community": community, "shared": shared, "my_playlists": my_playlists, "song_tags": song_tags, diff --git a/linedance-app/local/local_db.py b/linedance-app/local/local_db.py index cb63f989..ef17f5c5 100644 --- a/linedance-app/local/local_db.py +++ b/linedance-app/local/local_db.py @@ -1,671 +1,466 @@ """ -local_db.py — Lokal SQLite database til offline brug. 
+local_db.py — Lokal SQLite database for LineDance Player v0.9 -Håndterer: - - Musikbiblioteker (stier der overvåges) - - Sange høstet fra filsystemet - - Lokale afspilningslister (offline-projekter) - - Synkroniseringsstatus mod API +Ny arkitektur: + songs — global katalog (synkroniseret med server, server-UUID som ID) + files — lokalt fil-index (kun denne maskine) + playlist_songs — refererer til song_id + valgfri file_id """ import sqlite3 -import threading +import logging from contextlib import contextmanager -from datetime import datetime, timezone from pathlib import Path +logger = logging.getLogger(__name__) + DB_PATH = Path.home() / ".linedance" / "local.db" -_local = threading.local() -_global_conn: sqlite3.Connection | None = None - -def _get_conn() -> sqlite3.Connection: - """Returnerer en global forbindelse i autocommit mode.""" - global _global_conn - if _global_conn is None: - DB_PATH.parent.mkdir(parents=True, exist_ok=True) - _global_conn = sqlite3.connect(str(DB_PATH), check_same_thread=False, - isolation_level=None) # autocommit - _global_conn.row_factory = sqlite3.Row - _global_conn.execute("PRAGMA journal_mode=WAL") - _global_conn.execute("PRAGMA foreign_keys=ON") - return _global_conn - - -def new_conn() -> sqlite3.Connection: - """Åbn en frisk forbindelse til brug i tag_editor og dialogs.""" - conn = sqlite3.connect(str(DB_PATH), check_same_thread=False, timeout=10) - conn.row_factory = sqlite3.Row - conn.execute("PRAGMA journal_mode=WAL") - conn.execute("PRAGMA foreign_keys=OFF") - return conn +def get_db_path() -> Path: + return DB_PATH @contextmanager def get_db(): - """Context manager der bruger app-forbindelsen i autocommit mode. 
- Hver statement committer med det samme — ingen eksplicit transaktion.""" - conn = _get_conn() + DB_PATH.parent.mkdir(parents=True, exist_ok=True) + conn = sqlite3.connect(DB_PATH, timeout=10) + conn.row_factory = sqlite3.Row + conn.execute("PRAGMA journal_mode=WAL") + conn.execute("PRAGMA foreign_keys=ON") try: yield conn + conn.commit() except Exception: + conn.rollback() raise + finally: + conn.close() -def get_db_raw() -> sqlite3.Connection: - return _get_conn() +# ── Schema ──────────────────────────────────────────────────────────────────── + +SCHEMA = """ +-- Musik-biblioteker (mapper brugeren har tilføjet) +CREATE TABLE IF NOT EXISTS libraries ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + path TEXT NOT NULL UNIQUE, + is_active INTEGER NOT NULL DEFAULT 1, + created_at TEXT NOT NULL DEFAULT (datetime('now')) +); + +-- Global sang-katalog (synkroniseret med server) +-- ID er server-UUID. Sange uden server-ID har et lokalt UUID. +CREATE TABLE IF NOT EXISTS songs ( + id TEXT PRIMARY KEY, + title TEXT NOT NULL DEFAULT '', + artist TEXT NOT NULL DEFAULT '', + album TEXT NOT NULL DEFAULT '', + bpm INTEGER NOT NULL DEFAULT 0, + duration_sec INTEGER NOT NULL DEFAULT 0, + mbid TEXT UNIQUE, + acoustid TEXT, + server_synced INTEGER NOT NULL DEFAULT 0, + created_at TEXT NOT NULL DEFAULT (datetime('now')) +); + +-- Lokalt fil-index (kun denne maskine) +CREATE TABLE IF NOT EXISTS files ( + id TEXT PRIMARY KEY, + song_id TEXT NOT NULL REFERENCES songs(id) ON DELETE CASCADE, + local_path TEXT NOT NULL UNIQUE, + file_missing INTEGER NOT NULL DEFAULT 0, + file_format TEXT NOT NULL DEFAULT '', + file_modified_at TEXT NOT NULL DEFAULT '', + extra_tags TEXT NOT NULL DEFAULT '{}', + created_at TEXT NOT NULL DEFAULT (datetime('now')) +); + +CREATE INDEX IF NOT EXISTS idx_files_song_id ON files(song_id); +CREATE INDEX IF NOT EXISTS idx_files_missing ON files(file_missing); + +-- Dans-niveauer +CREATE TABLE IF NOT EXISTS dance_levels ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + 
sort_order INTEGER NOT NULL, + name TEXT NOT NULL UNIQUE, + description TEXT NOT NULL DEFAULT '' +); + +-- Danse +CREATE TABLE IF NOT EXISTS dances ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL, + level_id INTEGER REFERENCES dance_levels(id), + choreographer TEXT NOT NULL DEFAULT '', + video_url TEXT NOT NULL DEFAULT '', + stepsheet_url TEXT NOT NULL DEFAULT '', + notes TEXT NOT NULL DEFAULT '', + use_count INTEGER NOT NULL DEFAULT 1, + source TEXT NOT NULL DEFAULT 'local', + UNIQUE(name, level_id) +); + +-- Sang-dans tags +CREATE TABLE IF NOT EXISTS song_dances ( + id TEXT PRIMARY KEY, + song_id TEXT NOT NULL REFERENCES songs(id) ON DELETE CASCADE, + dance_id INTEGER NOT NULL REFERENCES dances(id), + dance_order INTEGER NOT NULL DEFAULT 1, + UNIQUE(song_id, dance_id) +); + +-- Alternativ-dans tags +CREATE TABLE IF NOT EXISTS song_alt_dances ( + id TEXT PRIMARY KEY, + song_id TEXT NOT NULL REFERENCES songs(id) ON DELETE CASCADE, + dance_id INTEGER NOT NULL REFERENCES dances(id), + note TEXT NOT NULL DEFAULT '', + source TEXT NOT NULL DEFAULT 'local', + created_at TEXT NOT NULL DEFAULT (datetime('now')), + UNIQUE(song_id, dance_id) +); + +CREATE INDEX IF NOT EXISTS idx_song_dances ON song_dances(song_id); +CREATE INDEX IF NOT EXISTS idx_song_alt_dances ON song_alt_dances(song_id); + +-- Playlister +CREATE TABLE IF NOT EXISTS playlists ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + description TEXT NOT NULL DEFAULT '', + tags TEXT NOT NULL DEFAULT '', + api_project_id TEXT UNIQUE, + is_linked INTEGER NOT NULL DEFAULT 0, + server_permission TEXT NOT NULL DEFAULT 'edit', + is_deleted INTEGER NOT NULL DEFAULT 0, + created_at TEXT NOT NULL DEFAULT (datetime('now')) +); + +-- Playliste-sange +CREATE TABLE IF NOT EXISTS playlist_songs ( + id TEXT PRIMARY KEY, + playlist_id TEXT NOT NULL REFERENCES playlists(id) ON DELETE CASCADE, + song_id TEXT NOT NULL REFERENCES songs(id), + file_id TEXT REFERENCES files(id), + position INTEGER NOT NULL, + status TEXT 
NOT NULL DEFAULT 'pending', + is_workshop INTEGER NOT NULL DEFAULT 0, + dance_override TEXT NOT NULL DEFAULT '' +); + +CREATE INDEX IF NOT EXISTS idx_playlist_songs_playlist ON playlist_songs(playlist_id); +CREATE INDEX IF NOT EXISTS idx_playlist_songs_song ON playlist_songs(song_id); + +-- Event-state (gemmes løbende) +CREATE TABLE IF NOT EXISTS event_state ( + key TEXT PRIMARY KEY, + value TEXT NOT NULL +); + +-- Dans-navne til autoudfyld +CREATE TABLE IF NOT EXISTS dance_names ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL UNIQUE, + use_count INTEGER NOT NULL DEFAULT 1 +); +""" + +DEFAULT_DANCE_LEVELS = [ + (10, "Absolute Beginner", "Ingen tidligere danse-erfaring kræves"), + (20, "Beginner", "Lidt tidligere erfaring"), + (30, "High Beginner", "God begynder, klar til mere"), + (40, "Low Improver", "Begyndende øvet"), + (50, "Improver", "Grundlæggende færdigheder på plads"), + (60, "High Improver", "Stærk øvet, næsten intermediate"), + (70, "Low Intermediate", "Begyndende intermediate"), + (80, "Intermediate", "Erfaren danser"), + (90, "High Intermediate", "Stærk intermediate"), + (99, "Advanced", "Fuld beherskelse af trin og teknik"), +] def init_db(): - """Opret alle tabeller hvis de ikke findes.""" - conn = _get_conn() - - # executescript committer automatisk og nulstiller isolation_level - # Kør det direkte på den underliggende connection - conn.executescript(""" - CREATE TABLE IF NOT EXISTS libraries ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - path TEXT NOT NULL UNIQUE, - is_active INTEGER NOT NULL DEFAULT 1, - last_full_scan TEXT, - created_at TEXT NOT NULL DEFAULT (datetime('now')) - ); - - CREATE TABLE IF NOT EXISTS songs ( - id TEXT PRIMARY KEY, - library_id INTEGER REFERENCES libraries(id), - local_path TEXT NOT NULL UNIQUE, - title TEXT NOT NULL DEFAULT '', - artist TEXT NOT NULL DEFAULT '', - album TEXT NOT NULL DEFAULT '', - bpm INTEGER NOT NULL DEFAULT 0, - duration_sec INTEGER NOT NULL DEFAULT 0, - file_format TEXT NOT NULL DEFAULT 
'', - file_modified_at TEXT NOT NULL, - file_missing INTEGER NOT NULL DEFAULT 0, - extra_tags TEXT NOT NULL DEFAULT '{}', - api_song_id TEXT, - last_synced_at TEXT, - created_at TEXT NOT NULL DEFAULT (datetime('now')) - ); - - CREATE TABLE IF NOT EXISTS dance_levels ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - sort_order INTEGER NOT NULL, - name TEXT NOT NULL UNIQUE, - description TEXT NOT NULL DEFAULT '', - synced_at TEXT - ); - - -- Dans-entitet: navn + niveau er unik kombination - CREATE TABLE IF NOT EXISTS dances ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT NOT NULL COLLATE NOCASE, - level_id INTEGER REFERENCES dance_levels(id), - use_count INTEGER NOT NULL DEFAULT 1, - source TEXT NOT NULL DEFAULT 'local', - synced_at TEXT, - UNIQUE(name, level_id) - ); - - -- Hoveddanse på en sang - CREATE TABLE IF NOT EXISTS song_dances ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - song_id TEXT NOT NULL REFERENCES songs(id) ON DELETE CASCADE, - dance_id INTEGER NOT NULL REFERENCES dances(id), - dance_order INTEGER NOT NULL DEFAULT 1, - UNIQUE(song_id, dance_id) - ); - - -- Alternativ-danse på en sang - CREATE TABLE IF NOT EXISTS song_alt_dances ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - song_id TEXT NOT NULL REFERENCES songs(id) ON DELETE CASCADE, - dance_id INTEGER NOT NULL REFERENCES dances(id), - note TEXT NOT NULL DEFAULT '', - source TEXT NOT NULL DEFAULT 'local', - created_at TEXT NOT NULL DEFAULT (datetime('now')), - UNIQUE(song_id, dance_id) - ); - - CREATE TABLE IF NOT EXISTS playlists ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT NOT NULL, - description TEXT NOT NULL DEFAULT '', - api_project_id TEXT, - last_synced_at TEXT, - created_at TEXT NOT NULL DEFAULT (datetime('now')) - ); - - CREATE TABLE IF NOT EXISTS playlist_songs ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - playlist_id INTEGER NOT NULL REFERENCES playlists(id) ON DELETE CASCADE, - song_id TEXT NOT NULL REFERENCES songs(id), - position INTEGER NOT NULL, - status TEXT NOT NULL DEFAULT 
'pending', - UNIQUE(playlist_id, position) - ); - - CREATE TABLE IF NOT EXISTS sync_queue ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - entity_type TEXT NOT NULL, - entity_id TEXT NOT NULL, - action TEXT NOT NULL, - payload TEXT NOT NULL, - created_at TEXT NOT NULL DEFAULT (datetime('now')) - ); - - CREATE TABLE IF NOT EXISTS event_state ( - key TEXT PRIMARY KEY, - value TEXT NOT NULL - ); - - CREATE INDEX IF NOT EXISTS idx_songs_title ON songs(title); - CREATE INDEX IF NOT EXISTS idx_songs_artist ON songs(artist); - CREATE INDEX IF NOT EXISTS idx_songs_missing ON songs(file_missing); - CREATE INDEX IF NOT EXISTS idx_songs_library ON songs(library_id); - CREATE INDEX IF NOT EXISTS idx_song_dances ON song_dances(song_id); - CREATE INDEX IF NOT EXISTS idx_song_alt_dances ON song_alt_dances(song_id); - CREATE INDEX IF NOT EXISTS idx_dances_name ON dances(name); - """) - - # executescript slår foreign_keys fra — genaktiver - conn.execute("PRAGMA foreign_keys=ON") - - # Tilføj db_version tabel hvis den ikke findes - conn.execute(""" - CREATE TABLE IF NOT EXISTS db_version ( - version INTEGER PRIMARY KEY - ) - """) - - # Kør versionsbaserede migrationer - _run_versioned_migrations(conn) - - # Seed standard-niveauer - count = conn.execute("SELECT COUNT(*) FROM dance_levels").fetchone()[0] - if count == 0: - defaults = [ - (10, "Absolute Beginner", "Ingen tidligere danse-erfaring kræves"), - (20, "Beginner", "Lidt tidligere erfaring"), - (30, "High Beginner", "God begynder, klar til mere"), - (40, "Low Improver", "Begyndende øvet"), - (50, "Improver", "Grundlæggende færdigheder på plads"), - (60, "High Improver", "Stærk øvet, næsten intermediate"), - (70, "Low Intermediate", "Begyndende intermediate"), - (80, "Intermediate", "Erfaren danser"), - (90, "High Intermediate", "Stærk intermediate"), - (99, "Advanced", "Fuld beherskelse af trin og teknik"), - ] - for row in defaults: + """Opret tabeller og seed dance_levels hvis de mangler.""" + DB_PATH.parent.mkdir(parents=True, 
exist_ok=True) + with get_db() as conn: + conn.executescript(SCHEMA) + # Seed dans-niveauer + for sort_order, name, desc in DEFAULT_DANCE_LEVELS: conn.execute( "INSERT OR IGNORE INTO dance_levels (sort_order, name, description) VALUES (?,?,?)", - row + (sort_order, name, desc) ) - - -# ── Versionsbaserede migrationer ────────────────────────────────────────────── -# Tilføj aldrig gamle — tilføj kun nye versioner nederst. - -MIGRATIONS: dict[int, list[str]] = { - 1: [ - "ALTER TABLE songs ADD COLUMN extra_tags TEXT NOT NULL DEFAULT '{}'", - ], - 2: [ - # Ny dans-entitet model - """CREATE TABLE IF NOT EXISTS dances ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT NOT NULL COLLATE NOCASE, - level_id INTEGER REFERENCES dance_levels(id), - use_count INTEGER NOT NULL DEFAULT 1, - source TEXT NOT NULL DEFAULT 'local', - synced_at TEXT, - UNIQUE(name, level_id) - )""", - """CREATE TABLE IF NOT EXISTS song_alt_dances ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - song_id TEXT NOT NULL REFERENCES songs(id) ON DELETE CASCADE, - dance_id INTEGER NOT NULL REFERENCES dances(id), - note TEXT NOT NULL DEFAULT '', - source TEXT NOT NULL DEFAULT 'local', - created_at TEXT NOT NULL DEFAULT (datetime('now')), - UNIQUE(song_id, dance_id) - )""", - # Migrer eksisterende song_dances data til ny model - # (kører kun på ældre databaser der har dance_name kolonnen) - """INSERT OR IGNORE INTO dances (name, level_id, source) - SELECT DISTINCT dance_name, level_id, 'local' - FROM song_dances WHERE dance_name IS NOT NULL AND dance_name != ''""", - ], - 3: [ - "ALTER TABLE playlists ADD COLUMN tags TEXT NOT NULL DEFAULT ''", - ], - 4: [ - "ALTER TABLE dances ADD COLUMN choreographer TEXT NOT NULL DEFAULT ''", - "ALTER TABLE dances ADD COLUMN video_url TEXT NOT NULL DEFAULT ''", - "ALTER TABLE dances ADD COLUMN stepsheet_url TEXT NOT NULL DEFAULT ''", - "ALTER TABLE dances ADD COLUMN notes TEXT NOT NULL DEFAULT ''", - ], - 5: [ - # Workshop-markering på sang+dans kombination (ikke dans alene) - 
"""ALTER TABLE song_dances ADD COLUMN is_workshop INTEGER NOT NULL DEFAULT 0""", - """ALTER TABLE song_alt_dances ADD COLUMN is_workshop INTEGER NOT NULL DEFAULT 0""", - ], - 6: [ - # Workshop og dans-valg på selve playlist-sangen - """ALTER TABLE playlist_songs ADD COLUMN is_workshop INTEGER NOT NULL DEFAULT 0""", - """ALTER TABLE playlist_songs ADD COLUMN dance_override TEXT NOT NULL DEFAULT ''""", - ], - 7: [ - # Linkede server-playlister - """ALTER TABLE playlists ADD COLUMN is_linked INTEGER NOT NULL DEFAULT 0""", - """ALTER TABLE playlists ADD COLUMN server_permission TEXT NOT NULL DEFAULT 'view'""", - ], - 8: [ - # MusicBrainz og AcoustID matching - """ALTER TABLE songs ADD COLUMN mbid TEXT""", - """ALTER TABLE songs ADD COLUMN acoustid TEXT""", - ], - 9: [ - # Opdater niveau-navne til korrekte betegnelser i rigtig rækkefølge - "DELETE FROM dance_levels", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (10, 'Absolute Beginner', 'Ingen tidligere danse-erfaring kræves')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (20, 'Beginner', 'Lidt tidligere erfaring')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (30, 'High Beginner', 'God begynder, klar til mere')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (40, 'Low Improver', 'Begyndende øvet')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (50, 'Improver', 'Grundlæggende færdigheder på plads')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (60, 'High Improver', 'Stærk øvet, næsten intermediate')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (70, 'Low Intermediate', 'Begyndende intermediate')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (80, 'Intermediate', 'Erfaren danser')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (90, 'High Intermediate', 'Stærk intermediate')", - "INSERT INTO dance_levels (sort_order, name, 
description) VALUES (99, 'Advanced', 'Fuld beherskelse af trin og teknik')", - ], - 10: [ - # Ret stavefejl i eksisterende data - "UPDATE dance_levels SET name='Low Intermediate' WHERE name='Low Intermidiate' OR name='Low Intermidiat'", - "UPDATE dance_levels SET name='Intermediate' WHERE name='Intermidiate' OR name='Intermidate'", - "UPDATE dance_levels SET name='High Intermediate' WHERE name='High Intermidiate' OR name='High Intermidiat'", - ], - 11: [ - # Genopret dance_levels med korrekte navne og rækkefølge - "DELETE FROM dance_levels", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (10, 'Absolute Beginner', 'Ingen tidligere danse-erfaring kræves')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (20, 'Beginner', 'Lidt tidligere erfaring')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (30, 'High Beginner', 'God begynder, klar til mere')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (40, 'Low Improver', 'Begyndende øvet')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (50, 'Improver', 'Grundlæggende færdigheder på plads')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (60, 'High Improver', 'Stærk øvet, næsten intermediate')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (70, 'Low Intermediate', 'Begyndende intermediate')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (80, 'Intermediate', 'Erfaren danser')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (90, 'High Intermediate', 'Stærk intermediate')", - "INSERT INTO dance_levels (sort_order, name, description) VALUES (99, 'Advanced', 'Fuld beherskelse af trin og teknik')", - ], - 12: [ - # Tabel til at huske slettede playlister — til sync med serveren - """CREATE TABLE IF NOT EXISTS deleted_playlists ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT NOT NULL, - deleted_at TEXT NOT NULL DEFAULT (datetime('now')) - 
)""", - ], - 13: [ - # Soft delete: is_deleted=1 i stedet for at slette rækken. - # Giver sync mulighed for at sende sletningen til serveren. - """ALTER TABLE playlists ADD COLUMN is_deleted INTEGER NOT NULL DEFAULT 0""", - # api_project_id kolonne på deleted_playlists (hvis den mangler fra migration 12) - """ALTER TABLE deleted_playlists ADD COLUMN api_project_id TEXT""", - ], -} - - -def _run_versioned_migrations(conn): - """Kør kun migrationer der ikke allerede er kørt vha. db_version tabel.""" - row = conn.execute("SELECT version FROM db_version").fetchone() - current_version = row["version"] if row else 0 - - for version in sorted(MIGRATIONS.keys()): - if version <= current_version: - continue - for sql in MIGRATIONS[version]: - try: - conn.execute(sql) - except Exception: - pass # kolonnen eksisterer allerede - conn.execute( - "INSERT OR REPLACE INTO db_version (version) VALUES (?)", (version,) - ) - - - - - + logger.info("Database initialiseret") # ── Biblioteker ─────────────────────────────────────────────────────────────── - def add_library(path: str) -> int: with get_db() as conn: cur = conn.execute( "INSERT OR IGNORE INTO libraries (path) VALUES (?)", (path,) ) - if cur.lastrowid: - return cur.lastrowid - row = conn.execute("SELECT id FROM libraries WHERE path=?", (path,)).fetchone() - return row["id"] + if cur.lastrowid: + return cur.lastrowid + # INSERT OR IGNORE: lastrowid er upålidelig når rækken allerede findes + row = conn.execute("SELECT id FROM libraries WHERE path=?", (path,)).fetchone() + return row["id"] -def get_libraries(active_only: bool = True) -> list[sqlite3.Row]: +def get_libraries() -> list: with get_db() as conn: - if active_only: - return conn.execute( - "SELECT * FROM libraries WHERE is_active=1 ORDER BY path" - ).fetchall() - return conn.execute("SELECT * FROM libraries ORDER BY path").fetchall() + return conn.execute( + "SELECT * FROM libraries WHERE is_active=1 ORDER BY path" + ).fetchall() def remove_library(library_id: int): with get_db() as conn: - # Marker sange som manglende og løsriv dem fra biblioteket - conn.execute( - "UPDATE songs SET file_missing=1, library_id=NULL WHERE library_id=?", - (library_id,) - ) - 
# Nu kan biblioteket slettes uden FK-konflikt - conn.execute("DELETE FROM libraries WHERE id=?", (library_id,)) - - -def update_library_scan_time(library_id: int): - now = datetime.now(timezone.utc).isoformat() - with get_db() as conn: - conn.execute( - "UPDATE libraries SET last_full_scan=? WHERE id=?", (now, library_id) - ) + row = conn.execute( + "SELECT path FROM libraries WHERE id=?", (library_id,) + ).fetchone() + if row: + # Marker filer fra denne mappe som missing + conn.execute( + "UPDATE files SET file_missing=1 WHERE local_path LIKE ?", + (row["path"] + "%",) + ) + conn.execute("DELETE FROM libraries WHERE id=?", (library_id,)) # ── Sange ───────────────────────────────────────────────────────────────────── -def upsert_song(song_data: dict) -> str: +def find_or_create_song(title: str, artist: str = "", album: str = "", + bpm: int = 0, duration_sec: int = 0, + mbid: str = "", acoustid: str = "", + song_id: str = None) -> str: """ - Indsæt eller opdater en sang baseret på local_path. - Returnerer song_id. + Find eksisterende sang eller opret ny. Returnerer song_id. + Match-hierarki: server_id → mbid → acoustid → titel+artist → opret ny """ - import uuid, json + import uuid as _uuid + with get_db() as conn: + # Match på server-ID + if song_id: + row = conn.execute( + "SELECT id FROM songs WHERE id=?", (song_id,) + ).fetchone() + if row: + # Opdater data hvis bedre info tilgængeligt + conn.execute(""" + UPDATE songs SET + title = CASE WHEN title='' THEN ? ELSE title END, + artist = CASE WHEN artist='' THEN ? ELSE artist END, + bpm = CASE WHEN bpm=0 THEN ? ELSE bpm END, + mbid = CASE WHEN mbid IS NULL AND ? != '' THEN ? ELSE mbid END, + acoustid = CASE WHEN acoustid IS NULL AND ? != '' THEN ? ELSE acoustid END + WHERE id=? 
+ """, (title, artist, bpm, mbid, mbid, acoustid, acoustid, song_id)) + return song_id + + # Match på MBID + if mbid: + row = conn.execute( + "SELECT id FROM songs WHERE mbid=?", (mbid,) + ).fetchone() + if row: + return row["id"] + + # Match på AcoustID + if acoustid: + row = conn.execute( + "SELECT id FROM songs WHERE acoustid=?", (acoustid,) + ).fetchone() + if row: + if mbid: + conn.execute( + "UPDATE songs SET mbid=? WHERE id=? AND mbid IS NULL", + (mbid, row["id"]) + ) + return row["id"] + + # Match på titel + artist + if title: + row = conn.execute( + "SELECT id FROM songs WHERE title=? AND artist=?", + (title, artist) + ).fetchone() + if row: + if mbid: + conn.execute( + "UPDATE songs SET mbid=? WHERE id=? AND mbid IS NULL", + (mbid, row["id"]) + ) + return row["id"] + + # Opret ny sang + new_id = song_id or str(_uuid.uuid4()) + conn.execute( + "INSERT INTO songs (id, title, artist, album, bpm, duration_sec, mbid, acoustid) " + "VALUES (?,?,?,?,?,?,?,?)", + (new_id, title, artist, album, bpm, duration_sec, + mbid or None, acoustid or None) + ) + return new_id + + +def get_song(song_id: str) -> sqlite3.Row | None: + with get_db() as conn: + return conn.execute( + "SELECT * FROM songs WHERE id=?", (song_id,) + ).fetchone() + + +def update_song_bpm(song_id: str, bpm: int): + with get_db() as conn: + conn.execute( + "UPDATE songs SET bpm=? WHERE id=? AND (bpm=0 OR bpm IS NULL)", + (bpm, song_id) + ) + + +def update_song_mbid(song_id: str, mbid: str, acoustid: str = ""): + with get_db() as conn: + conn.execute( + "UPDATE songs SET mbid=?, acoustid=? WHERE id=?", + (mbid or None, acoustid or None, song_id) + ) + + +# ── Filer ───────────────────────────────────────────────────────────────────── + +def upsert_file(song_id: str, local_path: str, file_format: str = "", + file_modified_at: str = "", extra_tags: str = "{}") -> str: + """Opret eller opdater en fil-post. 
Returnerer file_id.""" + import uuid as _uuid with get_db() as conn: existing = conn.execute( - "SELECT id FROM songs WHERE local_path=?", (song_data["local_path"],) + "SELECT id FROM files WHERE local_path=?", (local_path,) + ).fetchone() + if existing: + conn.execute(""" + UPDATE files SET + song_id=?, file_missing=0, + file_format=?, file_modified_at=?, extra_tags=? + WHERE id=? + """, (song_id, file_format, file_modified_at, extra_tags, existing["id"])) + return existing["id"] + else: + file_id = str(_uuid.uuid4()) + conn.execute( + "INSERT INTO files (id, song_id, local_path, file_format, file_modified_at, extra_tags) " + "VALUES (?,?,?,?,?,?)", + (file_id, song_id, local_path, file_format, file_modified_at, extra_tags) + ) + return file_id + + +def get_file_for_song(song_id: str) -> sqlite3.Row | None: + """Find bedste tilgængelige fil for en sang.""" + with get_db() as conn: + return conn.execute( + "SELECT * FROM files WHERE song_id=? AND file_missing=0 LIMIT 1", + (song_id,) ).fetchone() - extra_tags_json = json.dumps(song_data.get("extra_tags", {}), ensure_ascii=False) - if existing: - song_id = existing["id"] - conn.execute(""" - UPDATE songs SET - library_id=?, title=?, artist=?, album=?, bpm=?, duration_sec=?, - file_format=?, file_modified_at=?, file_missing=0, extra_tags=? - WHERE id=? - """, ( - song_data.get("library_id"), - song_data.get("title", ""), - song_data.get("artist", ""), - song_data.get("album", ""), - song_data.get("bpm", 0), - song_data.get("duration_sec", 0), - song_data.get("file_format", ""), - song_data.get("file_modified_at", ""), - extra_tags_json, - song_id, - )) - else: - song_id = str(uuid.uuid4()) - conn.execute(""" - INSERT INTO songs - (id, library_id, local_path, title, artist, album, - bpm, duration_sec, file_format, file_modified_at, extra_tags) - VALUES (?,?,?,?,?,?,?,?,?,?,?) 
- """, ( - song_id, - song_data.get("library_id"), - song_data["local_path"], - song_data.get("title", ""), - song_data.get("artist", ""), - song_data.get("album", ""), - song_data.get("bpm", 0), - song_data.get("duration_sec", 0), - song_data.get("file_format", ""), - song_data.get("file_modified_at", ""), - extra_tags_json, - )) - - # Opdater danse hvis de er med i data — bevar eksisterende og merge - if "dances" in song_data: - file_dances = [] - for dance in song_data["dances"]: - name = dance.get("name", dance) if isinstance(dance, dict) else dance - if name: - file_dances.append(name.strip()) - - # Find eksisterende song_dances via dances tabel - existing = conn.execute(""" - SELECT sd.id, d.name, sd.dance_order, d.level_id, d.id as dance_id - FROM song_dances sd - JOIN dances d ON d.id = sd.dance_id - WHERE sd.song_id=? ORDER BY sd.dance_order - """, (song_id,)).fetchall() - existing_map = {r["name"].lower(): r for r in existing} - file_lower = [d.lower() for d in file_dances] - - # Slet danse der ikke længere er i filen - for row in existing: - if row["name"].lower() not in file_lower: - conn.execute("DELETE FROM song_dances WHERE id=?", (row["id"],)) - - # Tilføj eller opdater danse fra filen - for i, name in enumerate(file_dances, start=1): - ex = existing_map.get(name.lower()) - if ex: - conn.execute( - "UPDATE song_dances SET dance_order=? 
WHERE id=?", - (i, ex["id"]) - ) - else: - # Opret eller find dans (name + NULL level = ny dans uden niveau) - dance_id = get_or_create_dance(name, None, conn) - conn.execute( - "INSERT OR IGNORE INTO song_dances (song_id, dance_id, dance_order) " - "VALUES (?,?,?)", - (song_id, dance_id, i) - ) - - return song_id +def get_file(file_id: str) -> sqlite3.Row | None: + with get_db() as conn: + return conn.execute( + "SELECT * FROM files WHERE id=?", (file_id,) + ).fetchone() -def mark_song_missing(local_path: str): +def mark_file_missing(local_path: str): with get_db() as conn: conn.execute( - "UPDATE songs SET file_missing=1 WHERE local_path=?", (local_path,) + "UPDATE files SET file_missing=1 WHERE local_path=?", (local_path,) ) -def get_song_by_path(local_path: str) -> sqlite3.Row | None: +def get_all_known_paths() -> set[str]: with get_db() as conn: - return conn.execute( - "SELECT * FROM songs WHERE local_path=?", (local_path,) - ).fetchone() + rows = conn.execute("SELECT local_path FROM files").fetchall() + return {r["local_path"] for r in rows} -def search_songs(query: str, limit: int = 50) -> list[sqlite3.Row]: - """Søg i titel, artist, album, dans, koreograf, niveau og øvrige tags.""" - import logging as _log - _log.getLogger(__name__).info(f"search_songs: '{query}'") - pattern = f"%{query}%" +# ── Søgning i bibliotek ─────────────────────────────────────────────────────── + +def search_songs(query: str, limit: int = 200) -> list: + """Søg i sange der har en tilgængelig fil.""" with get_db() as conn: - rows = conn.execute(""" - SELECT DISTINCT s.* FROM songs s + pattern = f"%{query}%" + return conn.execute(""" + SELECT s.*, f.id as file_id, f.local_path, f.file_format, f.file_missing, + GROUP_CONCAT(d.name, ', ') as dance_names + FROM songs s + JOIN files f ON f.song_id = s.id AND f.file_missing = 0 LEFT JOIN song_dances sd ON sd.song_id = s.id LEFT JOIN dances d ON d.id = sd.dance_id - LEFT JOIN dance_levels dl ON dl.id = d.level_id - WHERE s.file_missing = 
0 - AND ( - s.title LIKE ? OR - s.artist LIKE ? OR - s.album LIKE ? OR - d.name LIKE ? OR - d.choreographer LIKE ? OR - dl.name LIKE ? OR - s.extra_tags LIKE ? - ) - ORDER BY s.artist, s.title + WHERE s.title LIKE ? OR s.artist LIKE ? OR s.album LIKE ? + GROUP BY s.id + ORDER BY s.title LIMIT ? - """, (pattern,)*7 + (limit,)).fetchall() - _log.getLogger(__name__).info(f"search_songs: '{query}' → {len(rows)} resultater") - return rows + """, (pattern, pattern, pattern, limit)).fetchall() -def get_songs_for_library(library_id: int) -> list[sqlite3.Row]: +def get_all_songs_with_files(limit: int = 5000) -> list: with get_db() as conn: - return conn.execute( - "SELECT * FROM songs WHERE library_id=? ORDER BY artist, title", - (library_id,) - ).fetchall() + return conn.execute(""" + SELECT s.*, f.id as file_id, f.local_path, f.file_format, f.file_missing, + GROUP_CONCAT(d.name, ', ') as dance_names + FROM songs s + JOIN files f ON f.song_id = s.id AND f.file_missing = 0 + LEFT JOIN song_dances sd ON sd.song_id = s.id + LEFT JOIN dances d ON d.id = sd.dance_id + GROUP BY s.id + ORDER BY s.title + LIMIT ? 
+ """, (limit,)).fetchall() -def get_all_song_paths_for_library(library_id: int) -> dict[str, str]: - """Returnerer {local_path: file_modified_at} — bruges til fuld scan.""" - with get_db() as conn: - rows = conn.execute( - "SELECT local_path, file_modified_at FROM songs WHERE library_id=?", - (library_id,) - ).fetchall() - return {row["local_path"]: row["file_modified_at"] for row in rows} +# ── Playlister ──────────────────────────────────────────────────────────────── - -# ── Afspilningslister ───────────────────────────────────────────────────────── - -def create_playlist(name: str, description: str = "", tags: str = "") -> int: - with get_db() as conn: - cur = conn.execute( - "INSERT INTO playlists (name, description, tags) VALUES (?,?,?)", - (name, description, tags) - ) - return cur.lastrowid - - -def create_linked_playlist(name: str, api_project_id: str, - permission: str = "view", - description: str = "", tags: str = "") -> int: - """Opret en playliste der er linket til en server-playliste.""" - with get_db() as conn: - cur = conn.execute( - """INSERT INTO playlists - (name, description, tags, api_project_id, is_linked, server_permission) - VALUES (?,?,?,?,1,?)""", - (name, description, tags, api_project_id, permission) - ) - return cur.lastrowid - - -def update_playlist_tags(playlist_id: int, tags: str): +def create_playlist(name: str, description: str = "") -> str: + import uuid as _uuid + pl_id = str(_uuid.uuid4()) with get_db() as conn: conn.execute( - "UPDATE playlists SET tags=? 
WHERE id=?", - (tags, playlist_id) + "INSERT INTO playlists (id, name, description) VALUES (?,?,?)", + (pl_id, name, description) ) + return pl_id -def get_all_playlist_tags() -> list[str]: - """Returnerer alle unikke tags på tværs af alle playlists, sorteret alfabetisk.""" - with get_db() as conn: - rows = conn.execute( - "SELECT tags FROM playlists WHERE tags != '' AND name != ?", - ("__aktiv__",) - ).fetchall() - tags = set() - for row in rows: - for tag in row["tags"].split(","): - t = tag.strip().lower() - if t: - tags.add(t) - return sorted(tags) - - -def get_playlists(tag_filter: str | None = None) -> list[sqlite3.Row]: - """Hent alle navngivne playlists med sang-antal. Filtrer på tag hvis angivet.""" +def get_playlists(tag_filter: str | None = None) -> list: with get_db() as conn: if tag_filter: - rows = conn.execute(""" + return conn.execute(""" SELECT p.*, COUNT(ps.id) as song_count FROM playlists p LEFT JOIN playlist_songs ps ON ps.playlist_id = p.id - WHERE p.name != ? AND p.is_deleted = 0 AND ( - p.tags LIKE ? OR p.tags LIKE ? OR - p.tags LIKE ? OR p.tags = ? - ) - GROUP BY p.id - ORDER BY p.created_at DESC - """, ( - "__aktiv__", - f"{tag_filter},%", - f"%, {tag_filter},%", - f"%, {tag_filter}", - tag_filter, - )).fetchall() + WHERE p.name != '__aktiv__' AND p.is_deleted = 0 + AND (p.tags LIKE ? OR p.tags LIKE ? OR p.tags LIKE ? OR p.tags = ?) + GROUP BY p.id ORDER BY p.created_at DESC + """, (f"{tag_filter},%", f"%, {tag_filter},%", + f"%, {tag_filter}", tag_filter)).fetchall() else: - rows = conn.execute(""" + return conn.execute(""" SELECT p.*, COUNT(ps.id) as song_count FROM playlists p LEFT JOIN playlist_songs ps ON ps.playlist_id = p.id - WHERE p.name != ? 
AND p.is_deleted = 0 - GROUP BY p.id - ORDER BY p.created_at DESC - """, ("__aktiv__",)).fetchall() - return rows + WHERE p.name != '__aktiv__' AND p.is_deleted = 0 + GROUP BY p.id ORDER BY p.created_at DESC + """).fetchall() -def delete_playlist(playlist_id: int): - """ - Soft-slet en playliste: sæt is_deleted=1 i stedet for at fjerne rækken. - Sync kan derefter sende sletningen til serveren via api_project_id. - """ +def delete_playlist(playlist_id: str): + """Soft-slet — sæt is_deleted=1.""" with get_db() as conn: conn.execute( - "UPDATE playlists SET is_deleted=1 WHERE id=?", - (playlist_id,) + "UPDATE playlists SET is_deleted=1 WHERE id=?", (playlist_id,) ) - - -def add_song_to_playlist(playlist_id: int, song_id: str, position: int | None = None) -> int: - with get_db() as conn: - if position is None: - row = conn.execute( - "SELECT MAX(position) as max_pos FROM playlist_songs WHERE playlist_id=?", - (playlist_id,) - ).fetchone() - position = (row["max_pos"] or 0) + 1 - - cur = conn.execute( - "INSERT INTO playlist_songs (playlist_id, song_id, position) VALUES (?,?,?)", - (playlist_id, song_id, position) - ) - return cur.lastrowid - - -def update_playlist_song_status(playlist_song_id: int, status: str): - valid = {"pending", "playing", "played", "skipped"} - if status not in valid: - raise ValueError(f"Ugyldig status: {status}") - with get_db() as conn: - conn.execute( - "UPDATE playlist_songs SET status=? 
WHERE id=?", - (status, playlist_song_id) - ) - - -def get_playlist_with_songs(playlist_id: int) -> dict: +def get_playlist_with_songs(playlist_id: str) -> dict: with get_db() as conn: playlist = conn.execute( "SELECT * FROM playlists WHERE id=?", (playlist_id,) @@ -676,9 +471,12 @@ def get_playlist_with_songs(playlist_id: int) -> dict: songs = conn.execute(""" SELECT ps.id as ps_id, ps.position, ps.status, ps.is_workshop, ps.dance_override, - s.* + ps.song_id, ps.file_id, + s.title, s.artist, s.album, s.bpm, s.duration_sec, + f.local_path, f.file_format, f.file_missing FROM playlist_songs ps JOIN songs s ON s.id = ps.song_id + LEFT JOIN files f ON f.id = ps.file_id WHERE ps.playlist_id = ? ORDER BY ps.position """, (playlist_id,)).fetchall() @@ -686,212 +484,94 @@ def get_playlist_with_songs(playlist_id: int) -> dict: return {"playlist": dict(playlist), "songs": [dict(s) for s in songs]} -# ── Event-state (gemmes løbende så man kan genstarte efter strømsvigt) ──────── +def add_song_to_playlist(playlist_id: str, song_id: str, + file_id: str | None = None, + position: int | None = None) -> str: + import uuid as _uuid + with get_db() as conn: + if position is None: + row = conn.execute( + "SELECT MAX(position) as max_pos FROM playlist_songs WHERE playlist_id=?", + (playlist_id,) + ).fetchone() + position = (row["max_pos"] or 0) + 1 + ps_id = str(_uuid.uuid4()) + conn.execute( + "INSERT INTO playlist_songs (id, playlist_id, song_id, file_id, position) " + "VALUES (?,?,?,?,?)", + (ps_id, playlist_id, song_id, file_id, position) + ) + return ps_id + + +# ── Dans-tags ───────────────────────────────────────────────────────────────── + +def get_playlist_tags() -> list[str]: + with get_db() as conn: + rows = conn.execute( + "SELECT tags FROM playlists WHERE tags != '' AND name != '__aktiv__' AND is_deleted=0" + ).fetchall() + tags = set() + for row in rows: + for tag in row["tags"].split(","): + t = tag.strip().lower() + if t: + tags.add(t) + return sorted(tags) + + +def 
update_playlist_tags(playlist_id: str, tags: str): + with get_db() as conn: + conn.execute( + "UPDATE playlists SET tags=? WHERE id=?", (tags, playlist_id) + ) + + +# ── Event-state ─────────────────────────────────────────────────────────────── def save_event_state(current_idx: int, statuses: list[str]): - """Gem event-fremgang — overskrives ved hver ændring.""" import json with get_db() as conn: - conn.execute("INSERT OR REPLACE INTO event_state (key,value) VALUES ('current_idx',?)", - (str(current_idx),)) - conn.execute("INSERT OR REPLACE INTO event_state (key,value) VALUES ('statuses',?)", - (json.dumps(statuses),)) + conn.execute( + "INSERT OR REPLACE INTO event_state (key,value) VALUES ('current_idx',?)", + (str(current_idx),) + ) + conn.execute( + "INSERT OR REPLACE INTO event_state (key,value) VALUES ('statuses',?)", + (json.dumps(statuses),) + ) -def load_event_state() -> tuple[int, list[str]] | None: - """Indlæs gemt event-fremgang. Returnerer None hvis ingen gemt tilstand.""" +def load_event_state() -> tuple | None: import json with get_db() as conn: idx_row = conn.execute( "SELECT value FROM event_state WHERE key='current_idx'" ).fetchone() - sta_row = conn.execute( + stat_row = conn.execute( "SELECT value FROM event_state WHERE key='statuses'" ).fetchone() - if not idx_row or not sta_row: - return None - return int(idx_row["value"]), json.loads(sta_row["value"]) - - -def clear_event_state(): - """Nulstil gemt event-tilstand (bruges ved 'Start event').""" - with get_db() as conn: - conn.execute("DELETE FROM event_state") - - -# ── Dans-navne ordbog ───────────────────────────────────────────────────────── - -# ── Dans-entitet funktioner ─────────────────────────────────────────────────── - -def get_dance(dance_id: int) -> sqlite3.Row | None: - with get_db() as conn: - return conn.execute( - "SELECT * FROM dances WHERE id=?", (dance_id,) - ).fetchone() - - -def update_dance_info(dance_id: int, choreographer: str = "", - video_url: str = "", 
stepsheet_url: str = "", - notes: str = ""): - with get_db() as conn: - conn.execute(""" - UPDATE dances SET - choreographer = ?, - video_url = ?, - stepsheet_url = ?, - notes = ? - WHERE id = ? - """, (choreographer.strip(), video_url.strip(), - stepsheet_url.strip(), notes.strip(), dance_id)) - - - -def get_or_create_dance(name: str, level_id: int | None, - conn=None, choreographer: str = "") -> int: - """Find eller opret en dans (name + level_id kombination). - Returnerer dance_id. conn er valgfri — bruges ved nested kald.""" - name = name.strip() - choreo = choreographer.strip() - close = False - if conn is None: - conn = new_conn() - close = True - try: - existing = conn.execute( - "SELECT id FROM dances WHERE name=? COLLATE NOCASE AND level_id IS ?", - (name, level_id) - ).fetchone() - if existing: - conn.execute( - "UPDATE dances SET use_count=use_count+1" - + (", choreographer=?" if choreo else "") + - " WHERE id=?", - ((choreo, existing["id"]) if choreo else (existing["id"],)) - ) - return existing["id"] - conn.execute( - "INSERT INTO dances (name, level_id, choreographer, use_count, source) VALUES (?,?,?,1,'local')", - (name, level_id, choreo) - ) - return conn.execute( - "SELECT id FROM dances WHERE name=? COLLATE NOCASE AND level_id IS ?", - (name, level_id) - ).fetchone()["id"] - finally: - if close: - conn.commit() - conn.close() - - -def get_choreographer_suggestions(prefix: str, limit: int = 20) -> list[str]: - """Returnerer koreografer der starter med prefix, sorteret alfabetisk.""" - with get_db() as conn: - rows = conn.execute(""" - SELECT DISTINCT choreographer - FROM dances - WHERE choreographer LIKE ? COLLATE NOCASE - AND choreographer != '' - ORDER BY choreographer - LIMIT ? - """, (f"{prefix}%", limit)).fetchall() - return [r["choreographer"] for r in rows] - - -def get_dance_suggestions(prefix: str, limit: int = 20) -> list[dict]: - """Returnerer danse der matcher prefix i navn ELLER koreograf. 
- Sorteret efter popularitet — bruges til autoudfyld.""" - with get_db() as conn: - rows = conn.execute(""" - SELECT d.id, d.name, d.level_id, d.use_count, d.choreographer, - dl.name as level_name, dl.sort_order - FROM dances d - LEFT JOIN dance_levels dl ON dl.id = d.level_id - WHERE d.name LIKE ? COLLATE NOCASE - OR d.choreographer LIKE ? COLLATE NOCASE - ORDER BY d.use_count DESC, dl.sort_order, d.name - LIMIT ? - """, (f"%{prefix}%", f"%{prefix}%", limit)).fetchall() - return [dict(r) for r in rows] - - -def get_dances_for_song(song_id: str) -> list[dict]: - """Hent hoveddanse for en sang med niveau-info og workshop-flag.""" - with get_db() as conn: - rows = conn.execute(""" - SELECT d.id as dance_id, d.name, d.level_id, d.choreographer, - dl.name as level_name, sd.dance_order, - sd.id as song_dance_id, sd.is_workshop - FROM song_dances sd - JOIN dances d ON d.id = sd.dance_id - LEFT JOIN dance_levels dl ON dl.id = d.level_id - WHERE sd.song_id=? ORDER BY sd.dance_order - """, (song_id,)).fetchall() - return [dict(r) for r in rows] - - -def get_alt_dances_for_song(song_id: str) -> list[dict]: - """Hent alternativ-danse for en sang med niveau-info.""" - with get_db() as conn: - rows = conn.execute(""" - SELECT d.id as dance_id, d.name, d.level_id, - dl.name as level_name, sad.note, sad.source, sad.id as alt_id - FROM song_alt_dances sad - JOIN dances d ON d.id = sad.dance_id - LEFT JOIN dance_levels dl ON dl.id = d.level_id - WHERE sad.song_id=? 
ORDER BY d.name - """, (song_id,)).fetchall() - return [dict(r) for r in rows] + if not idx_row or not stat_row: + return None + return int(idx_row["value"]), json.loads(stat_row["value"]) # ── Dans-niveauer ───────────────────────────────────────────────────────────── -def get_dance_levels() -> list[sqlite3.Row]: - """Hent alle niveauer sorteret efter sort_order.""" +def get_dance_levels() -> list: with get_db() as conn: return conn.execute( "SELECT * FROM dance_levels ORDER BY sort_order" ).fetchall() -def sync_dance_levels_from_api(levels: list[dict]): - """Synkroniser niveauer fra API.""" - from datetime import datetime, timezone - now = datetime.now(timezone.utc).isoformat() +def upsert_dance_levels(levels: list[dict]): with get_db() as conn: for lvl in levels: conn.execute(""" - INSERT INTO dance_levels (sort_order, name, description, synced_at) - VALUES (?, ?, ?, ?) + INSERT INTO dance_levels (id, sort_order, name, description) + VALUES (:id, :sort_order, :name, :description) ON CONFLICT(name) DO UPDATE SET - sort_order = excluded.sort_order, - description = excluded.description, - synced_at = excluded.synced_at - """, (lvl["sort_order"], lvl["name"], lvl.get("description", ""), now)) - - -def sync_dances_from_api(dances: list[dict]): - """Synkroniser danse fra API — {name, level_id, use_count}.""" - from datetime import datetime, timezone - now = datetime.now(timezone.utc).isoformat() - with get_db() as conn: - for d in dances: - conn.execute(""" - INSERT INTO dances (name, level_id, use_count, source, synced_at) - VALUES (?, ?, ?, 'community', ?) 
- ON CONFLICT(name, level_id) DO UPDATE SET - use_count = MAX(use_count, excluded.use_count), - synced_at = excluded.synced_at - """, (d["name"], d.get("level_id"), d.get("use_count", 1), now)) - - -# Backwards compat alias -def get_dance_name_suggestions(prefix: str, limit: int = 20) -> list[str]: - """Returnerer dans-navne som strings — bruges af AutoLineEdit.""" - suggestions = get_dance_suggestions(prefix, limit) - result = [] - for s in suggestions: - if s.get("level_name"): - result.append(f"{s['name']} / {s['level_name']}") - else: - result.append(s["name"]) - return result \ No newline at end of file + sort_order=excluded.sort_order, + description=excluded.description + """, lvl) \ No newline at end of file diff --git a/linedance-app/local/scanner.py b/linedance-app/local/scanner.py index 284f3db6..2af684ca 100644 --- a/linedance-app/local/scanner.py +++ b/linedance-app/local/scanner.py @@ -1,17 +1,11 @@ """ -scanner.py — Scanning af musikbiblioteker i baggrunden. +scanner.py — Scanning af musikbiblioteker i baggrunden. v0.9 -Kører som en separat subprocess der scanner ét bibliotek ad gangen -og rapporterer fremgang via stdout JSON-linjer. - -Kan også importeres direkte og bruges via ScanWorker QThread. +Skriver til files-tabellen og finder/opretter sange i songs-tabellen. 
""" import os -import sys -import json -import sqlite3 -import uuid import logging +import time from pathlib import Path logger = logging.getLogger(__name__) @@ -19,8 +13,8 @@ logger = logging.getLogger(__name__) SUPPORTED = {'.mp3', '.flac', '.m4a', '.ogg', '.wav', '.aiff', '.wma'} -def is_supported(path: Path) -> bool: - return path.suffix.lower() in SUPPORTED +def is_supported(path) -> bool: + return Path(path).suffix.lower() in SUPPORTED def get_file_mtime(path: Path) -> str: @@ -32,30 +26,29 @@ def get_file_mtime(path: Path) -> str: def scan_library(library_id: int, library_path: str, db_path: str, overwrite_bpm: bool = False, - progress_callback=None): + progress_callback=None) -> int: """ - Scan ét bibliotek og upsert sange til SQLite. - progress_callback(done, total, current_file) kaldes løbende. + Scan ét bibliotek og upsert til files + songs tabellerne. + Returnerer antal scannede filer. """ + import sqlite3 from local.tag_reader import read_tags - - conn = sqlite3.connect(db_path) - conn.row_factory = sqlite3.Row + from local.local_db import find_or_create_song, upsert_file base = Path(library_path) if not base.exists(): - conn.close() return 0 - # Byg indeks over kendte filer + # Byg indeks over kendte filer (path → mtime) + conn = sqlite3.connect(db_path, timeout=10) + conn.row_factory = sqlite3.Row + conn.execute("PRAGMA journal_mode=WAL") + known = {} for row in conn.execute( - "SELECT local_path, file_modified_at, file_missing FROM songs WHERE library_id=?", - (library_id,) + "SELECT local_path, file_modified_at FROM files WHERE file_missing=0" ).fetchall(): - # Sange markeret som manglende medtages ikke i known — de skal altid genscanes - if not row["file_missing"]: - known[row["local_path"]] = row["file_modified_at"] + known[row["local_path"]] = row["file_modified_at"] # Find alle musikfiler all_files = [] @@ -68,8 +61,6 @@ def scan_library(library_id: int, library_path: str, db_path: str, total = len(all_files) done = 0 - import time - for fp in 
all_files: path_str = str(fp) mtime = get_file_mtime(fp) @@ -77,108 +68,55 @@ def scan_library(library_id: int, library_path: str, db_path: str, if progress_callback: progress_callback(done, total, fp.name) - # Spring over hvis ikke ændret + # Spring over uændrede filer if path_str in known and known[path_str] == mtime: done += 1 - # Yield hvert 100. fil så andre tråde kan køre - if done % 100 == 0: - time.sleep(0.005) + time.sleep(0.005) continue try: - tags = read_tags(fp) - extra = json.dumps(tags.get("extra_tags", {}), ensure_ascii=False) + tags = read_tags(str(fp)) + title = tags.get("title", "") or fp.stem + artist = tags.get("artist", "") + album = tags.get("album", "") + bpm = tags.get("bpm", 0) + mbid = tags.get("mbid", "") + acoustid = tags.get("acoustid", "") + duration_sec = tags.get("duration_sec", 0) + file_format = tags.get("file_format", fp.suffix.lstrip(".").lower()) + extra_tags = tags.get("extra_tags", "{}") - # Match 0: MBID-match — sikrest mulige match - existing = None - mbid_from_file = tags.get("mbid", "") - if mbid_from_file: - existing = conn.execute( - "SELECT id, bpm FROM songs WHERE mbid=? LIMIT 1", - (mbid_from_file,) - ).fetchone() - if existing: - conn.execute( - "UPDATE songs SET local_path=? WHERE id=?", - (path_str, existing["id"]) - ) + # Find eller opret sang i global katalog + song_id = find_or_create_song( + title=title, artist=artist, album=album, + bpm=bpm, duration_sec=duration_sec, + mbid=mbid, acoustid=acoustid, + ) - # Match 1: præcis sti-match - if not existing: - existing = conn.execute( - "SELECT id, bpm FROM songs WHERE local_path=?", (path_str,) - ).fetchone() + # Opdater BPM på sangen hvis vi har bedre data + if bpm and bpm > 0: + conn.execute( + "UPDATE songs SET bpm=? WHERE id=? 
AND (bpm=0 OR bpm IS NULL)", + (bpm, song_id) + ) - # Match 2: titel+artist match — fil er flyttet eller var missing - if not existing: - title = tags.get("title", "") - artist = tags.get("artist", "") - if title: - # Prioritér file_missing=1 sange, men tag også sange med ugyldig sti - existing = conn.execute(""" - SELECT id, bpm FROM songs - WHERE title=? AND artist=? AND file_missing=1 - LIMIT 1 - """, (title, artist)).fetchone() - if not existing: - # Tjek om der er en sang med samme titel+artist men ugyldig sti - existing = conn.execute(""" - SELECT id, bpm, local_path FROM songs - WHERE title=? AND artist=? AND file_missing=0 - LIMIT 1 - """, (title, artist)).fetchone() - if existing: - from pathlib import Path as _Path - old_path = existing["local_path"] or "" - if old_path and not _Path(old_path).exists(): - pass # Sti er ugyldig — brug dette match - else: - existing = None # Sti er valid — det er en anden fil + # Opret eller opdater fil-post + upsert_file( + song_id=song_id, + local_path=path_str, + file_format=file_format, + file_modified_at=mtime, + extra_tags=extra_tags, + ) - if existing: - # Opdater stien så den peger på den nye placering - conn.execute( - "UPDATE songs SET local_path=? WHERE id=?", - (path_str, existing["id"]) - ) - - if existing: - bpm = tags.get("bpm", 0) - if not overwrite_bpm and existing["bpm"] and existing["bpm"] > 0: - bpm = existing["bpm"] # behold eksisterende BPM - mbid = tags.get("mbid", "") - conn.execute(""" - UPDATE songs SET - library_id=?, title=?, artist=?, album=?, - bpm=?, duration_sec=?, file_format=?, - file_modified_at=?, file_missing=0, extra_tags=?, - mbid=CASE WHEN ? != '' THEN ? ELSE mbid END - WHERE id=? 
- """, (library_id, tags.get("title",""), tags.get("artist",""), - tags.get("album",""), bpm, tags.get("duration_sec",0), - tags.get("file_format",""), mtime, extra, - mbid, mbid, existing["id"])) - song_id = existing["id"] - else: - song_id = str(uuid.uuid4()) - conn.execute(""" - INSERT OR IGNORE INTO songs - (id, library_id, local_path, title, artist, album, - bpm, duration_sec, file_format, file_modified_at, extra_tags, mbid) - VALUES (?,?,?,?,?,?,?,?,?,?,?,?) - """, (song_id, library_id, path_str, - tags.get("title",""), tags.get("artist",""), - tags.get("album",""), tags.get("bpm",0), - tags.get("duration_sec",0), tags.get("file_format",""), - mtime, extra, tags.get("mbid",""))) - - # Importer dans-tags fra filen hvis de ikke allerede er i DB + # Dans-tags fra fil file_dances = tags.get("dances", []) if file_dances: - existing_dances = conn.execute( + existing_count = conn.execute( "SELECT COUNT(*) FROM song_dances WHERE song_id=?", (song_id,) ).fetchone()[0] - if existing_dances == 0: + if existing_count == 0: + import uuid for order, dance_name in enumerate(file_dances, start=1): dance_row = conn.execute( "SELECT id FROM dances WHERE name=? 
COLLATE NOCASE LIMIT 1", @@ -192,64 +130,25 @@ def scan_library(library_id: int, library_path: str, db_path: str, else: dance_id = dance_row["id"] conn.execute( - "INSERT OR IGNORE INTO song_dances (song_id, dance_id, dance_order) VALUES (?,?,?)", - (song_id, dance_id, order) + "INSERT OR IGNORE INTO song_dances (id, song_id, dance_id, dance_order) VALUES (?,?,?,?)", + (str(uuid.uuid4()), song_id, dance_id, order) ) - conn.commit() + except Exception as e: - # UNIQUE constraint er forventet og ufarlig — sang findes allerede - if "UNIQUE constraint" in str(e): - logger.debug(f"Sang allerede i DB: {fp.name}") - else: - logger.warning(f"Scan fejl {fp.name}: {e}") + logger.warning(f"Scan fejl {fp.name}: {e}") done += 1 - # Lille pause efter hver scannet fil så GUI ikke hænger time.sleep(0.02) # Marker manglende filer for path_str in known: if not Path(path_str).exists(): conn.execute( - "UPDATE songs SET file_missing=1 WHERE local_path=?", (path_str,) + "UPDATE files SET file_missing=1 WHERE local_path=?", (path_str,) ) conn.commit() - - conn.execute( - "UPDATE libraries SET last_full_scan=datetime('now') WHERE id=?", - (library_id,) - ) - conn.commit() conn.close() - return done - -# ── Subprocess entry point ───────────────────────────────────────────────────── - -if __name__ == "__main__": - """ - Kørsel som subprocess: - python scanner.py - Rapporterer JSON-linjer til stdout: {"done":N,"total":M,"file":"..."} - """ - if len(sys.argv) < 4: - sys.exit(1) - - lib_id = int(sys.argv[1]) - lib_path = sys.argv[2] - db_path = sys.argv[3] - - # Tilføj app-mappen til path så local.tag_reader kan importeres - app_dir = str(Path(__file__).parent.parent) - if app_dir not in sys.path: - sys.path.insert(0, app_dir) - - def report(done, total, filename): - print(json.dumps({"done": done, "total": total, "file": filename}), - flush=True) - - count = scan_library(lib_id, lib_path, db_path, - progress_callback=report) - print(json.dumps({"done": count, "total": count, "finished": 
True}), - flush=True) \ No newline at end of file + logger.info(f"Scan færdig: {done} filer i {library_path}") + return done \ No newline at end of file diff --git a/linedance-app/local/sync_manager.py b/linedance-app/local/sync_manager.py index b45abada..2cbfa430 100644 --- a/linedance-app/local/sync_manager.py +++ b/linedance-app/local/sync_manager.py @@ -1,150 +1,122 @@ """ -sync_manager.py — Synkronisering mellem lokal SQLite og server API. -Kører i baggrundstråd — blokerer aldrig GUI. +sync_manager.py — Synkronisering mellem lokal database og server. v0.9 """ import json +import logging import sqlite3 import threading import urllib.request import urllib.error -import logging from pathlib import Path logger = logging.getLogger(__name__) class SyncManager: - def __init__(self, db_path: str, server_url: str, token: str): - self._db_path = db_path - self._server_url = server_url.rstrip("/") - self._token = token - self._lock = threading.Lock() - def _headers(self): - return { - "Content-Type": "application/json", - "Authorization": f"Bearer {self._token}", - } + def __init__(self, api_url: str, db_path: str): + self._api_url = api_url.rstrip("/") + self._db_path = db_path + self._token: str | None = None + + def set_token(self, token: str): + self._token = token + + # ── HTTP ────────────────────────────────────────────────────────────────── def _post(self, path: str, data: dict) -> dict: - body = json.dumps(data).encode("utf-8") + body = json.dumps(data).encode() req = urllib.request.Request( - f"{self._server_url}{path}", data=body, - headers=self._headers(), method="POST" + f"{self._api_url}{path}", + data=body, + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {self._token}", + }, + method="POST", ) try: with urllib.request.urlopen(req, timeout=30) as resp: return json.loads(resp.read()) except urllib.error.HTTPError as e: - detail = e.read().decode("utf-8", errors="replace") + detail = e.read().decode() raise Exception(f"HTTP 
{e.code}: {detail}") def _get(self, path: str) -> dict: req = urllib.request.Request( - f"{self._server_url}{path}", - headers=self._headers(), method="GET" + f"{self._api_url}{path}", + headers={"Authorization": f"Bearer {self._token}"}, ) - with urllib.request.urlopen(req, timeout=30) as resp: - return json.loads(resp.read()) + try: + with urllib.request.urlopen(req, timeout=30) as resp: + return json.loads(resp.read()) + except urllib.error.HTTPError as e: + detail = e.read().decode() + raise Exception(f"HTTP {e.code}: {detail}") # ── Push ────────────────────────────────────────────────────────────────── def push(self, on_done=None, on_error=None): - """Push lokal data til server i baggrundstråd.""" def _run(): try: payload = self._build_push_payload() - logger.info(f"Push OK: {len(payload['songs'])} sange") - result = self._post("/sync/push", payload) - self._save_playlist_ids(result.get("playlist_id_map", {})) - # Fjern soft-slettede playlister permanent efter succesfuld push - if payload.get("deleted_playlists"): - conn = sqlite3.connect(self._db_path) - conn.execute( - "DELETE FROM playlists WHERE is_deleted=1 AND api_project_id IS NOT NULL" - ) - conn.commit() - conn.close() - logger.info(f"Push OK: {result.get('songs_synced', '?')} sange synkroniseret") - if on_done: - on_done(result) - except Exception as e: - logger.error(f"Sync push fejl: {e}", exc_info=True) - if on_error: - on_error(str(e)) - threading.Thread(target=_run, daemon=True).start() - - def _save_playlist_ids(self, id_map: dict): - """Gem server-IDs (api_project_id) på lokale playlister.""" - if not id_map: - return - conn = sqlite3.connect(self._db_path) - for local_id, server_id in id_map.items(): - try: - conn.execute( - "UPDATE playlists SET api_project_id=? 
WHERE id=?", - (server_id, int(local_id)) + logger.info(f"Push: {len(payload['songs'])} sange, " + f"{len(payload['playlists'])} playlister") + result = self._post("/sync/push", payload) + self._save_server_ids( + result.get("song_id_map", {}), + result.get("playlist_id_map", {}), ) - except Exception: - pass - conn.commit() - conn.close() - - def pull(self, on_done=None, on_error=None): - """Pull server-data ned i baggrundstråd.""" - def _run(): - try: - result = self._get("/sync/pull") - pl_count = len(result.get("my_playlists", [])) - logger.info(f"Pull OK: {pl_count} playlister") - self._apply_pull(result) + logger.info(f"Push OK: {result.get('songs_synced','?')} sange synkroniseret") if on_done: on_done(result) except Exception as e: - logger.error(f"Sync pull fejl: {e}", exc_info=True) + logger.error(f"Push fejl: {e}", exc_info=True) if on_error: on_error(str(e)) threading.Thread(target=_run, daemon=True).start() + # ── Push + Pull ─────────────────────────────────────────────────────────── + def push_and_pull(self, on_done=None, on_error=None): - """Push FØR pull — så sletninger når serveren inden pull henter data ned.""" def _run(): try: - # 1. Push lokal data op — inkl. sletninger + # 1. Push payload = self._build_push_payload() deleted = payload.get("deleted_playlists", []) logger.info(f"Sync push — {len(payload['songs'])} sange, " f"{len(payload['playlists'])} playlister, " f"sletter {len(deleted)}: {deleted}") push_result = self._post("/sync/push", payload) - self._save_playlist_ids(push_result.get("playlist_id_map", {})) + self._save_server_ids( + push_result.get("song_id_map", {}), + push_result.get("playlist_id_map", {}), + ) logger.info(f"Push svar: status={push_result.get('status')}, " f"sange={push_result.get('songs_synced', 0)}, " f"playlister={push_result.get('playlists_synced', 0)}") - # 2. Pull — sletninger er nu gennemført på serveren. - # _apply_pull filtrerer is_deleted=1 rækker fra automatisk. + # 2. 
Pull pull_result = self._get("/sync/pull") pl_names = [p.get("name") for p in pull_result.get("my_playlists", [])] logger.info(f"Pull modtog {len(pl_names)} playlister: {pl_names}") self._apply_pull(pull_result) - # Fjern soft-slettede playlister permanent nu serveren er opdateret + # 3. Fjern soft-slettede permanent efter succesfuld sync if deleted: - conn = sqlite3.connect(self._db_path) + conn = sqlite3.connect(self._db_path, timeout=10) + conn.execute("PRAGMA journal_mode=WAL") conn.execute( "DELETE FROM playlists WHERE is_deleted=1 AND api_project_id IS NOT NULL" ) conn.commit() conn.close() - logger.info(f"Soft-slettede playlister fjernet lokalt efter sync") + logger.info("Soft-slettede playlister fjernet lokalt efter sync") pl_count = len(pull_result.get("my_playlists", [])) - logger.info( - f"Sync OK — {len(payload['songs'])} sange, " - f"{len(payload['playlists'])} playlister, " - f"{pl_count} server-playlister" - ) + logger.info(f"Sync OK — {len(payload['songs'])} sange, " + f"{len(payload['playlists'])} playlister, " + f"{pl_count} server-playlister") if on_done: on_done({"push": push_result, "pull": pull_result}) except Exception as e: @@ -156,34 +128,36 @@ class SyncManager: # ── Byg payload ─────────────────────────────────────────────────────────── def _build_push_payload(self) -> dict: - conn = sqlite3.connect(self._db_path) + conn = sqlite3.connect(self._db_path, timeout=10) conn.row_factory = sqlite3.Row + conn.execute("PRAGMA journal_mode=WAL") - # Sange + # Sange (dem der har filer — altså kendes lokalt) songs = [] - for row in conn.execute( - "SELECT id, title, artist, album, bpm, duration_sec, file_format, mbid, acoustid " - "FROM songs WHERE file_missing=0" - ).fetchall(): + for row in conn.execute(""" + SELECT DISTINCT s.id, s.title, s.artist, s.album, + s.bpm, s.duration_sec, s.mbid, s.acoustid, s.server_synced + FROM songs s + JOIN files f ON f.song_id = s.id AND f.file_missing = 0 + """).fetchall(): songs.append({ - "local_id": 
str(row["id"]), + "local_id": row["id"], "title": row["title"] or "", "artist": row["artist"] or "", "album": row["album"] or "", "bpm": row["bpm"] or 0, "duration_sec": row["duration_sec"] or 0, - "file_format": row["file_format"] or "", "mbid": row["mbid"] or "", "acoustid": row["acoustid"] or "", }) # Danse dances = [] - for row in conn.execute( - "SELECT d.name, dl.name as level_name, d.choreographer, " - "d.video_url, d.stepsheet_url, d.notes " - "FROM dances d LEFT JOIN dance_levels dl ON dl.id = d.level_id" - ).fetchall(): + for row in conn.execute(""" + SELECT d.name, dl.name as level_name, d.choreographer, + d.video_url, d.stepsheet_url, d.notes + FROM dances d LEFT JOIN dance_levels dl ON dl.id = d.level_id + """).fetchall(): dances.append({ "name": row["name"] or "", "level_name": row["level_name"] or "", @@ -193,16 +167,17 @@ class SyncManager: "notes": row["notes"] or "", }) - # Dans-tags per sang + # Dans-tags song_dances = [] for row in conn.execute(""" - SELECT sd.song_id, d.name as dance_name, dl.name as level_name, sd.dance_order + SELECT sd.song_id, d.name as dance_name, + dl.name as level_name, sd.dance_order FROM song_dances sd JOIN dances d ON d.id = sd.dance_id LEFT JOIN dance_levels dl ON dl.id = d.level_id """).fetchall(): song_dances.append({ - "song_local_id": str(row["song_id"]), + "song_local_id": row["song_id"], "dance_name": row["dance_name"], "level_name": row["level_name"] or "", "dance_order": row["dance_order"], @@ -211,36 +186,36 @@ class SyncManager: # Alternativ-danse song_alts = [] for row in conn.execute(""" - SELECT sad.song_id, d.name as dance_name, dl.name as level_name, sad.note + SELECT sad.song_id, d.name as dance_name, + dl.name as level_name, sad.note FROM song_alt_dances sad JOIN dances d ON d.id = sad.dance_id LEFT JOIN dance_levels dl ON dl.id = d.level_id """).fetchall(): song_alts.append({ - "song_local_id": str(row["song_id"]), + "song_local_id": row["song_id"], "dance_name": row["dance_name"], "level_name": 
row["level_name"] or "", "note": row["note"] or "", }) - # Playlister — send alle (nye og eksisterende) til serveren. - # Brug api_project_id som local_id hvis den kendes — så serveren - # kan matche på ID og ikke oprette duplikater. + # Playlister — alle ikke-slettede playlists = [] - for pl in conn.execute( - "SELECT id, name, description, tags, api_project_id FROM playlists " - "WHERE name != '__aktiv__' AND is_deleted = 0" - ).fetchall(): + for pl in conn.execute(""" + SELECT id, name, description, tags, api_project_id + FROM playlists + WHERE name != '__aktiv__' AND is_deleted = 0 + """).fetchall(): pl_songs = [] for ps in conn.execute(""" - SELECT s.id, s.title, s.artist, + SELECT s.id as song_id, s.title, s.artist, ps.position, ps.status, ps.is_workshop, ps.dance_override FROM playlist_songs ps JOIN songs s ON s.id = ps.song_id WHERE ps.playlist_id=? ORDER BY ps.position """, (pl["id"],)).fetchall(): pl_songs.append({ - "song_local_id": str(ps["id"]), + "song_local_id": ps["song_id"], "song_title": ps["title"] or "", "song_artist": ps["artist"] or "", "position": int(ps["position"] or 1), @@ -248,9 +223,8 @@ class SyncManager: "is_workshop": bool(ps["is_workshop"]), "dance_override": ps["dance_override"] or "", }) - # Brug api_project_id som local_id hvis den kendes — - # serveren bruger dette til at finde eksisterende liste - local_id = pl["api_project_id"] or str(pl["id"]) + # Brug api_project_id som local_id hvis kendt + local_id = pl["api_project_id"] or pl["id"] playlists.append({ "local_id": local_id, "name": pl["name"], @@ -260,9 +234,7 @@ class SyncManager: "songs": pl_songs, }) - # Slettede playlister — skal fjernes fra serveren. - # Serveren forventer en liste af strings (api_project_id). - # Kun playlister der faktisk er nået serveren (har api_project_id). 
+ # Slettede playlister deleted = [ row["api_project_id"] for row in conn.execute( @@ -273,162 +245,280 @@ class SyncManager: conn.close() return { - "songs": songs, - "dances": dances, - "song_dances": song_dances, - "song_alts": song_alts, - "playlists": playlists, - "deleted_playlists": deleted, + "songs": songs, + "dances": dances, + "song_dances": song_dances, + "song_alts": song_alts, + "playlists": playlists, + "deleted_playlists": deleted, } + # ── Gem server-IDs ──────────────────────────────────────────────────────── + + def _save_server_ids(self, song_id_map: dict, playlist_id_map: dict): + """ + Gem server-IDs lokalt. + song_id_map: lokal_song_id → server_song_id + playlist_id_map: lokal_pl_id → server_pl_id + """ + if not song_id_map and not playlist_id_map: + return + conn = sqlite3.connect(self._db_path, timeout=10) + conn.execute("PRAGMA journal_mode=WAL") + + # Sange: hvis server gav et andet ID end det lokale, opdater + for local_id, server_id in song_id_map.items(): + if local_id != server_id: + # Tjek om server-ID allerede eksisterer + existing = conn.execute( + "SELECT id FROM songs WHERE id=?", (server_id,) + ).fetchone() + if not existing: + # Opdater lokal sang til server-ID + conn.execute( + "UPDATE songs SET id=?, server_synced=1 WHERE id=?", + (server_id, local_id) + ) + # Opdater referencer + conn.execute( + "UPDATE files SET song_id=? WHERE song_id=?", + (server_id, local_id) + ) + conn.execute( + "UPDATE playlist_songs SET song_id=? WHERE song_id=?", + (server_id, local_id) + ) + conn.execute( + "UPDATE song_dances SET song_id=? WHERE song_id=?", + (server_id, local_id) + ) + conn.execute( + "UPDATE song_alt_dances SET song_id=? WHERE song_id=?", + (server_id, local_id) + ) + else: + conn.execute( + "UPDATE songs SET server_synced=1 WHERE id=?", (local_id,) + ) + + # Playlister + for local_id, server_id in playlist_id_map.items(): + conn.execute( + "UPDATE playlists SET api_project_id=? WHERE id=? 
OR api_project_id=?", + (server_id, local_id, local_id) + ) + + conn.commit() + conn.close() + # ── Anvend pull ─────────────────────────────────────────────────────────── def _apply_pull(self, data: dict): - """Gem server-data lokalt — opdaterer dans-info og importerer playlister.""" - conn = sqlite3.connect(self._db_path) + """Gem server-data lokalt.""" + import uuid + conn = sqlite3.connect(self._db_path, timeout=10) conn.row_factory = sqlite3.Row + conn.execute("PRAGMA journal_mode=WAL") - # Opdater dans-info fra server - for d in data.get("dances", []): - if not d.get("name"): - continue - existing = conn.execute( - "SELECT id FROM dances WHERE name=? COLLATE NOCASE", (d["name"],) - ).fetchone() - if existing and (d.get("choreographer") or d.get("video_url") or d.get("stepsheet_url")): - conn.execute(""" - UPDATE dances SET - choreographer = CASE WHEN choreographer='' THEN ? ELSE choreographer END, - video_url = CASE WHEN video_url='' THEN ? ELSE video_url END, - stepsheet_url = CASE WHEN stepsheet_url='' THEN ? ELSE stepsheet_url END - WHERE id=? 
- """, (d.get("choreographer",""), d.get("video_url",""), - d.get("stepsheet_url",""), existing["id"])) - - # Importer/opdater egne playlister fra server — server er sandhed - # Hent server-IDs på soft-slettede playlister så vi springer dem over - deleted_server_ids = { - row["api_project_id"] - for row in conn.execute( - "SELECT api_project_id FROM playlists " - "WHERE is_deleted=1 AND api_project_id IS NOT NULL" - ).fetchall() - } - - for pl in data.get("my_playlists", []): - server_id = pl.get("server_id") - name = pl.get("name", "") - if not server_id or not name: - continue - - # Spring over hvis listen er soft-slettet lokalt - if server_id in deleted_server_ids: - continue - - existing = conn.execute( - "SELECT id FROM playlists WHERE api_project_id=?", (server_id,) - ).fetchone() - - if existing: - pl_id = existing["id"] - # Opdater navn hvis det er ændret på serveren - conn.execute( - "UPDATE playlists SET name=? WHERE id=?", (name, pl_id) - ) - else: - cur = conn.execute( - "INSERT INTO playlists (name, description, api_project_id, is_linked, server_permission) " - "VALUES (?,?,?,1,'edit')", - (name, pl.get("description",""), server_id) - ) - pl_id = cur.lastrowid - - # Genindlæs sange fra serveren — server er sandhed - conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,)) - position = 1 - for song_data in pl.get("songs", []): - title = song_data.get("title", "") - artist = song_data.get("artist", "") - if not title: + try: + # Opdater dans-info + for d in data.get("dances", []): + if not d.get("name"): continue - local = conn.execute( - "SELECT id FROM songs WHERE title=? AND artist=? LIMIT 1", - (title, artist) + existing = conn.execute( + "SELECT id FROM dances WHERE name=? COLLATE NOCASE", (d["name"],) ).fetchone() - if not local: - import uuid - new_id = str(uuid.uuid4()) + if existing and (d.get("choreographer") or d.get("video_url")): + conn.execute(""" + UPDATE dances SET + choreographer = CASE WHEN choreographer='' THEN ? 
ELSE choreographer END, + video_url = CASE WHEN video_url='' THEN ? ELSE video_url END, + stepsheet_url = CASE WHEN stepsheet_url='' THEN ? ELSE stepsheet_url END + WHERE id=? + """, (d.get("choreographer",""), d.get("video_url",""), + d.get("stepsheet_url",""), existing["id"])) + + # Hent soft-slettede server-IDs så vi springer dem over + deleted_server_ids = { + row["api_project_id"] + for row in conn.execute( + "SELECT api_project_id FROM playlists " + "WHERE is_deleted=1 AND api_project_id IS NOT NULL" + ).fetchall() + } + + # Importer egne playlister + for pl in data.get("my_playlists", []): + server_id = pl.get("server_id") + name = pl.get("name", "") + if not server_id or not name: + continue + if server_id in deleted_server_ids: + continue + + existing = conn.execute( + "SELECT id FROM playlists WHERE api_project_id=?", (server_id,) + ).fetchone() + + if existing: + pl_id = existing["id"] conn.execute( - "INSERT OR IGNORE INTO songs (id, title, artist, file_missing) VALUES (?,?,?,1)", - (new_id, title, artist) + "UPDATE playlists SET name=? WHERE id=?", (name, pl_id) ) - local_id = new_id else: - local_id = local["id"] + pl_id = str(uuid.uuid4()) + conn.execute( + "INSERT INTO playlists (id, name, description, api_project_id, is_linked, server_permission) " + "VALUES (?,?,?,?,1,'edit')", + (pl_id, name, pl.get("description",""), server_id) + ) - conn.execute(""" - INSERT OR IGNORE INTO playlist_songs - (playlist_id, song_id, position, status, is_workshop, dance_override) - VALUES (?,?,?,?,?,?) 
- """, (pl_id, local_id, position, - song_data.get("status","pending"), - 1 if song_data.get("is_workshop") else 0, - song_data.get("dance_override","") or "")) - position += 1 - - # Importer delte playlister (read-only — is_linked=1, server_permission='view') - for pl in data.get("shared", []): - server_id = pl.get("server_id") - name = pl.get("name", "") - owner = pl.get("owner", "?") - if not server_id or not name: - continue - - existing = conn.execute( - "SELECT id FROM playlists WHERE api_project_id=?", (server_id,) - ).fetchone() - - if existing: - # Opdater sange fra server (ejer kan have ændret listen) - pl_id = existing["id"] + # Genindlæs sange conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,)) - else: - cur = conn.execute( - "INSERT INTO playlists (name, description, api_project_id, is_linked, server_permission) " - "VALUES (?,?,?,1,'view')", - (f"{name} ({owner})", "", server_id) - ) - pl_id = cur.lastrowid + position = 1 + songs_from_server = pl.get("songs", []) + logger.info(f"Pull: liste '{name}' har {len(songs_from_server)} sange") - position = 1 - for song_data in pl.get("songs", []): - title = song_data.get("title", "") - artist = song_data.get("artist", "") - if not title: - continue - local = conn.execute( - "SELECT id FROM songs WHERE title=? AND artist=? 
LIMIT 1", - (title, artist) - ).fetchone() - if not local: - import uuid - new_id = str(uuid.uuid4()) - conn.execute( - "INSERT OR IGNORE INTO songs (id, title, artist, file_missing) VALUES (?,?,?,1)", - (new_id, title, artist) + for song_data in songs_from_server: + server_song_id = song_data.get("song_id", "") + title = song_data.get("title", "") + artist = song_data.get("artist", "") + mbid = song_data.get("mbid", "") + acoustid = song_data.get("acoustid", "") + + if not title and not server_song_id: + continue + + # Find eller opret sang lokalt + local_song_id = self._find_or_create_song_local( + conn, server_song_id, title, artist, + mbid=mbid, acoustid=acoustid, + bpm=song_data.get("bpm", 0), + duration_sec=song_data.get("duration_sec", 0), ) - local_id = new_id - else: - local_id = local["id"] - conn.execute(""" - INSERT OR IGNORE INTO playlist_songs - (playlist_id, song_id, position, status, is_workshop, dance_override) - VALUES (?,?,?,?,?,?) - """, (pl_id, local_id, position, - song_data.get("status","pending"), - 1 if song_data.get("is_workshop") else 0, - song_data.get("dance_override","") or "")) - position += 1 - conn.commit() - conn.close() \ No newline at end of file + # Find tilgængelig fil til denne sang + file_row = conn.execute( + "SELECT id FROM files WHERE song_id=? AND file_missing=0 LIMIT 1", + (local_song_id,) + ).fetchone() + file_id = file_row["id"] if file_row else None + + conn.execute(""" + INSERT INTO playlist_songs + (id, playlist_id, song_id, file_id, position, status, is_workshop, dance_override) + VALUES (?,?,?,?,?,?,?,?) 
+ """, (str(uuid.uuid4()), pl_id, local_song_id, file_id, position, + song_data.get("status","pending"), + 1 if song_data.get("is_workshop") else 0, + song_data.get("dance_override","") or "")) + position += 1 + + # Importer delte playlister + for pl in data.get("shared", []): + server_id = pl.get("server_id") + name = pl.get("name", "") + owner = pl.get("owner", "?") + if not server_id or not name: + continue + + existing = conn.execute( + "SELECT id FROM playlists WHERE api_project_id=?", (server_id,) + ).fetchone() + + if existing: + pl_id = existing["id"] + conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,)) + else: + pl_id = str(uuid.uuid4()) + conn.execute( + "INSERT INTO playlists (id, name, description, api_project_id, is_linked, server_permission) " + "VALUES (?,?,?,?,1,'view')", + (pl_id, f"{name} ({owner})", "", server_id) + ) + + position = 1 + for song_data in pl.get("songs", []): + server_song_id = song_data.get("song_id", "") + title = song_data.get("title", "") + artist = song_data.get("artist", "") + if not title and not server_song_id: + continue + + local_song_id = self._find_or_create_song_local( + conn, server_song_id, title, artist, + mbid=song_data.get("mbid", ""), + acoustid=song_data.get("acoustid", ""), + ) + file_row = conn.execute( + "SELECT id FROM files WHERE song_id=? AND file_missing=0 LIMIT 1", + (local_song_id,) + ).fetchone() + file_id = file_row["id"] if file_row else None + + conn.execute(""" + INSERT INTO playlist_songs + (id, playlist_id, song_id, file_id, position, status, is_workshop, dance_override) + VALUES (?,?,?,?,?,?,?,?) 
+ """, (str(uuid.uuid4()), pl_id, local_song_id, file_id, position, + song_data.get("status","pending"), + 1 if song_data.get("is_workshop") else 0, + song_data.get("dance_override","") or "")) + position += 1 + + conn.commit() + + except Exception: + conn.rollback() + raise + finally: + conn.close() + + def _find_or_create_song_local(self, conn, server_song_id: str, title: str, + artist: str = "", mbid: str = "", + acoustid: str = "", bpm: int = 0, + duration_sec: int = 0) -> str: + """Find eller opret sang lokalt. Returnerer lokal song_id.""" + import uuid + + # Match på server-ID + if server_song_id: + row = conn.execute( + "SELECT id FROM songs WHERE id=?", (server_song_id,) + ).fetchone() + if row: + return row["id"] + + # Match på MBID + if mbid: + row = conn.execute( + "SELECT id FROM songs WHERE mbid=?", (mbid,) + ).fetchone() + if row: + return row["id"] + + # Match på AcoustID + if acoustid: + row = conn.execute( + "SELECT id FROM songs WHERE acoustid=?", (acoustid,) + ).fetchone() + if row: + return row["id"] + + # Match på titel + artist + if title: + row = conn.execute( + "SELECT id FROM songs WHERE title=? 
AND artist=?", (title, artist) + ).fetchone() + if row: + return row["id"] + + # Opret ny — brug server-ID hvis tilgængeligt + new_id = server_song_id or str(uuid.uuid4()) + conn.execute( + "INSERT INTO songs (id, title, artist, bpm, duration_sec, mbid, acoustid, server_synced) " + "VALUES (?,?,?,?,?,?,?,1)", + (new_id, title, artist, bpm, duration_sec, mbid or None, acoustid or None) + ) + logger.info(f"Pull: oprettet sang '{title}' ({new_id})") + return new_id \ No newline at end of file diff --git a/linedance-app/ui/playlist_panel.py b/linedance-app/ui/playlist_panel.py index d66ee67e..885918b6 100644 --- a/linedance-app/ui/playlist_panel.py +++ b/linedance-app/ui/playlist_panel.py @@ -406,9 +406,13 @@ class PlaylistPanel(QWidget): return False # Hent sange med status, workshop og dans-override + # JOIN songs — sangen er altid i songs tabellen (oprettet ved pull med file_missing=1) + # file_missing betyder bare at filen ikke er på denne maskine songs_raw = conn.execute(""" - SELECT s.*, ps.position, ps.status, - ps.is_workshop, ps.dance_override + SELECT s.id, s.title, s.artist, s.album, + s.bpm, s.duration_sec, s.file_format, + s.local_path, s.file_missing, + ps.position, ps.status, ps.is_workshop, ps.dance_override FROM playlist_songs ps JOIN songs s ON s.id = ps.song_id WHERE ps.playlist_id=? 
ORDER BY ps.position
@@ -426,10 +430,10 @@ class PlaylistPanel(QWidget):
             override = row["dance_override"] or ""
             active_dance = override if override else (dance_names[0] if dance_names else "")
 
-            local_path = row["local_path"]
+            local_path = row["local_path"] or ""
             file_missing = bool(row["file_missing"])
 
-            # Forsøg at finde sangen lokalt hvis den mangler
+            # Forsøg at finde filen lokalt hvis den mangler på denne maskine
             if file_missing or not local_path:
                 match = conn.execute("""
                     SELECT local_path FROM songs
@@ -444,11 +448,11 @@ class PlaylistPanel(QWidget):
                 "id": row["id"],
                 "title": row["title"],
                 "artist": row["artist"],
-                "album": row["album"],
-                "bpm": row["bpm"],
-                "duration_sec": row["duration_sec"],
+                "album": row["album"] or "",
+                "bpm": row["bpm"] or 0,
+                "duration_sec": row["duration_sec"] or 0,
                 "local_path": local_path,
-                "file_format": row["file_format"],
+                "file_format": row["file_format"] or "",
                 "file_missing": file_missing,
                 "dances": dance_names,
                 "active_dance": active_dance,
@@ -605,9 +609,12 @@ class PlaylistPanel(QWidget):
         else:
             self._can_edit_server = False
         with get_db() as conn:
+            # JOIN songs — sangen er altid i songs tabellen (oprettet ved pull med file_missing=1)
             songs_raw = conn.execute("""
-                SELECT s.*, ps.position, ps.status,
-                       ps.is_workshop, ps.dance_override
+                SELECT s.id, s.title, s.artist, s.album,
+                       s.bpm, s.duration_sec, s.file_format,
+                       s.local_path, s.file_missing,
+                       ps.position, ps.status, ps.is_workshop, ps.dance_override
                 FROM playlist_songs ps
                 JOIN songs s ON s.id = ps.song_id
                 WHERE ps.playlist_id=? ORDER BY ps.position
@@ -618,17 +625,17 @@ class PlaylistPanel(QWidget):
         for row in songs_raw:
             dances = conn.execute("""
                 SELECT d.name FROM song_dances sd
-                JOIN dances d ON d.id = sd.dance_id
+                JOIN dances d ON d.id = sd.dance_id
                 WHERE sd.song_id=? 
ORDER BY sd.dance_order """, (row["id"],)).fetchall() dance_names = [d["name"] for d in dances] override = row["dance_override"] or "" active_dance = override if override else (dance_names[0] if dance_names else "") - local_path = row["local_path"] + local_path = row["local_path"] or "" file_missing = bool(row["file_missing"]) - # Forsøg at finde sangen lokalt hvis den mangler + # Forsøg at finde filen lokalt hvis den mangler på denne maskine if file_missing or not local_path: match = conn.execute(""" SELECT local_path FROM songs @@ -644,11 +651,11 @@ class PlaylistPanel(QWidget): "id": row["id"], "title": row["title"], "artist": row["artist"], - "album": row["album"], - "bpm": row["bpm"], - "duration_sec": row["duration_sec"], + "album": row["album"] or "", + "bpm": row["bpm"] or 0, + "duration_sec": row["duration_sec"] or 0, "local_path": local_path, - "file_format": row["file_format"], + "file_format": row["file_format"] or "", "file_missing": file_missing, "dances": dance_names, "active_dance": active_dance, @@ -1251,4 +1258,4 @@ class PlaylistPanel(QWidget): def _on_double_click(self, item: QListWidgetItem): idx = item.data(Qt.ItemDataRole.UserRole) if idx is not None: - self.song_selected.emit(idx) + self.song_selected.emit(idx) \ No newline at end of file