"""
|
|
sync_manager.py — Synkronisering mellem lokal SQLite og server API.
|
|
Kører i baggrundstråd — blokerer aldrig GUI.
|
|
"""
import json
import logging
import sqlite3
import threading
import urllib.error
import urllib.request
from pathlib import Path

logger = logging.getLogger(__name__)

class SyncManager:
    """Synchronises the local SQLite database with the server API.

    All public operations (push/pull) run in daemon background threads so
    callers — typically a GUI — are never blocked.
    """

    def __init__(self, db_path: str, server_url: str, token: str):
        """Store connection settings; performs no I/O.

        db_path    — path to the local SQLite database file.
        server_url — API base URL; a trailing slash is stripped.
        token      — bearer token sent with every request.
        """
        self._token = token
        self._db_path = db_path
        # Normalise so path concatenation never produces a double slash.
        self._server_url = server_url.rstrip("/")
        # NOTE(review): _lock is initialised but not used by any visible method.
        self._lock = threading.Lock()
def _headers(self):
|
|
return {
|
|
"Content-Type": "application/json",
|
|
"Authorization": f"Bearer {self._token}",
|
|
}
|
|
|
|
def _post(self, path: str, data: dict) -> dict:
|
|
body = json.dumps(data).encode("utf-8")
|
|
req = urllib.request.Request(
|
|
f"{self._server_url}{path}", data=body,
|
|
headers=self._headers(), method="POST"
|
|
)
|
|
try:
|
|
with urllib.request.urlopen(req, timeout=30) as resp:
|
|
return json.loads(resp.read())
|
|
except urllib.error.HTTPError as e:
|
|
detail = e.read().decode("utf-8", errors="replace")
|
|
raise Exception(f"HTTP {e.code}: {detail}")
|
|
|
|
def _get(self, path: str) -> dict:
|
|
req = urllib.request.Request(
|
|
f"{self._server_url}{path}",
|
|
headers=self._headers(), method="GET"
|
|
)
|
|
with urllib.request.urlopen(req, timeout=30) as resp:
|
|
return json.loads(resp.read())
|
|
|
|
# ── Push ──────────────────────────────────────────────────────────────────
|
|
|
|
def push(self, on_done=None, on_error=None):
|
|
"""Push lokal data til server i baggrundstråd."""
|
|
def _run():
|
|
try:
|
|
payload = self._build_push_payload()
|
|
logger.info(f"Push OK: {len(payload['songs'])} sange")
|
|
result = self._post("/sync/push", payload)
|
|
self._save_playlist_ids(result.get("playlist_id_map", {}))
|
|
# Ryd deleted_playlists nu de er sendt til serveren
|
|
if payload.get("deleted_playlists"):
|
|
conn = sqlite3.connect(self._db_path)
|
|
conn.execute("DELETE FROM deleted_playlists")
|
|
conn.commit()
|
|
conn.close()
|
|
logger.info(f"Push OK: {result.get('songs_synced', '?')} sange synkroniseret")
|
|
if on_done:
|
|
on_done(result)
|
|
except Exception as e:
|
|
logger.error(f"Sync push fejl: {e}", exc_info=True)
|
|
if on_error:
|
|
on_error(str(e))
|
|
threading.Thread(target=_run, daemon=True).start()
|
|
|
|
def _save_playlist_ids(self, id_map: dict):
|
|
"""Gem server-IDs (api_project_id) på lokale playlister."""
|
|
if not id_map:
|
|
return
|
|
conn = sqlite3.connect(self._db_path)
|
|
for local_id, server_id in id_map.items():
|
|
try:
|
|
conn.execute(
|
|
"UPDATE playlists SET api_project_id=? WHERE id=?",
|
|
(server_id, int(local_id))
|
|
)
|
|
except Exception:
|
|
pass
|
|
conn.commit()
|
|
conn.close()
|
|
|
|
def pull(self, on_done=None, on_error=None):
|
|
"""Pull server-data ned i baggrundstråd."""
|
|
def _run():
|
|
try:
|
|
result = self._get("/sync/pull")
|
|
pl_count = len(result.get("my_playlists", []))
|
|
logger.info(f"Pull OK: {pl_count} playlister")
|
|
self._apply_pull(result)
|
|
if on_done:
|
|
on_done(result)
|
|
except Exception as e:
|
|
logger.error(f"Sync pull fejl: {e}", exc_info=True)
|
|
if on_error:
|
|
on_error(str(e))
|
|
threading.Thread(target=_run, daemon=True).start()
|
|
|
|
def push_and_pull(self, on_done=None, on_error=None):
|
|
"""Push FØR pull — så sletninger når serveren inden pull henter data ned."""
|
|
def _run():
|
|
try:
|
|
# 1. Push lokal data op — inkl. sletninger
|
|
payload = self._build_push_payload()
|
|
push_result = self._post("/sync/push", payload)
|
|
self._save_playlist_ids(push_result.get("playlist_id_map", {}))
|
|
|
|
# Ryd deleted_playlists nu de er sendt
|
|
if payload.get("deleted_playlists"):
|
|
conn = sqlite3.connect(self._db_path)
|
|
conn.execute("DELETE FROM deleted_playlists")
|
|
conn.commit()
|
|
conn.close()
|
|
|
|
# 2. Pull — nu er sletninger gennemført på serveren
|
|
pull_result = self._get("/sync/pull")
|
|
self._apply_pull(pull_result)
|
|
|
|
pl_count = len(pull_result.get("my_playlists", []))
|
|
logger.info(
|
|
f"Sync OK — {len(payload['songs'])} sange, "
|
|
f"{len(payload['playlists'])} playlister, "
|
|
f"{pl_count} server-playlister"
|
|
)
|
|
if on_done:
|
|
on_done({"push": push_result, "pull": pull_result})
|
|
except Exception as e:
|
|
logger.error(f"push_and_pull fejl: {e}", exc_info=True)
|
|
if on_error:
|
|
on_error(str(e))
|
|
threading.Thread(target=_run, daemon=True).start()
|
|
|
|
# ── Byg payload ───────────────────────────────────────────────────────────
|
|
|
|
def _build_push_payload(self) -> dict:
|
|
conn = sqlite3.connect(self._db_path)
|
|
conn.row_factory = sqlite3.Row
|
|
|
|
# Sange
|
|
songs = []
|
|
for row in conn.execute(
|
|
"SELECT id, title, artist, album, bpm, duration_sec, file_format, mbid, acoustid "
|
|
"FROM songs WHERE file_missing=0"
|
|
).fetchall():
|
|
songs.append({
|
|
"local_id": str(row["id"]),
|
|
"title": row["title"] or "",
|
|
"artist": row["artist"] or "",
|
|
"album": row["album"] or "",
|
|
"bpm": row["bpm"] or 0,
|
|
"duration_sec": row["duration_sec"] or 0,
|
|
"file_format": row["file_format"] or "",
|
|
"mbid": row["mbid"] or "",
|
|
"acoustid": row["acoustid"] or "",
|
|
})
|
|
|
|
# Danse
|
|
dances = []
|
|
for row in conn.execute(
|
|
"SELECT d.name, dl.name as level_name, d.choreographer, "
|
|
"d.video_url, d.stepsheet_url, d.notes "
|
|
"FROM dances d LEFT JOIN dance_levels dl ON dl.id = d.level_id"
|
|
).fetchall():
|
|
dances.append({
|
|
"name": row["name"] or "",
|
|
"level_name": row["level_name"] or "",
|
|
"choreographer": row["choreographer"] or "",
|
|
"video_url": row["video_url"] or "",
|
|
"stepsheet_url": row["stepsheet_url"] or "",
|
|
"notes": row["notes"] or "",
|
|
})
|
|
|
|
# Dans-tags per sang
|
|
song_dances = []
|
|
for row in conn.execute("""
|
|
SELECT sd.song_id, d.name as dance_name, dl.name as level_name, sd.dance_order
|
|
FROM song_dances sd
|
|
JOIN dances d ON d.id = sd.dance_id
|
|
LEFT JOIN dance_levels dl ON dl.id = d.level_id
|
|
""").fetchall():
|
|
song_dances.append({
|
|
"song_local_id": str(row["song_id"]),
|
|
"dance_name": row["dance_name"],
|
|
"level_name": row["level_name"] or "",
|
|
"dance_order": row["dance_order"],
|
|
})
|
|
|
|
# Alternativ-danse
|
|
song_alts = []
|
|
for row in conn.execute("""
|
|
SELECT sad.song_id, d.name as dance_name, dl.name as level_name, sad.note
|
|
FROM song_alt_dances sad
|
|
JOIN dances d ON d.id = sad.dance_id
|
|
LEFT JOIN dance_levels dl ON dl.id = d.level_id
|
|
""").fetchall():
|
|
song_alts.append({
|
|
"song_local_id": str(row["song_id"]),
|
|
"dance_name": row["dance_name"],
|
|
"level_name": row["level_name"] or "",
|
|
"note": row["note"] or "",
|
|
})
|
|
|
|
# Playlister (kun lokalt oprettede — IKKE dem der kom fra serveren)
|
|
playlists = []
|
|
for pl in conn.execute(
|
|
"SELECT id, name, description, tags, api_project_id FROM playlists "
|
|
"WHERE name != '__aktiv__' AND (api_project_id IS NULL OR api_project_id = '')"
|
|
).fetchall():
|
|
pl_songs = []
|
|
for ps in conn.execute("""
|
|
SELECT s.id, s.title, s.artist,
|
|
ps.position, ps.status, ps.is_workshop, ps.dance_override
|
|
FROM playlist_songs ps
|
|
JOIN songs s ON s.id = ps.song_id
|
|
WHERE ps.playlist_id=? ORDER BY ps.position
|
|
""", (pl["id"],)).fetchall():
|
|
pl_songs.append({
|
|
"song_local_id": str(ps["id"]),
|
|
"song_title": ps["title"] or "",
|
|
"song_artist": ps["artist"] or "",
|
|
"position": int(ps["position"] or 1),
|
|
"status": ps["status"] or "pending",
|
|
"is_workshop": bool(ps["is_workshop"]),
|
|
"dance_override": ps["dance_override"] or "",
|
|
})
|
|
playlists.append({
|
|
"local_id": str(pl["id"]),
|
|
"name": pl["name"],
|
|
"description": pl["description"] or "",
|
|
"tags": pl["tags"] or "",
|
|
"visibility": "private",
|
|
"songs": pl_songs,
|
|
})
|
|
|
|
# Slettede playlister — skal fjernes fra serveren
|
|
deleted = [
|
|
row["name"] for row in conn.execute(
|
|
"SELECT name FROM deleted_playlists"
|
|
).fetchall()
|
|
]
|
|
|
|
conn.close()
|
|
return {
|
|
"songs": songs,
|
|
"dances": dances,
|
|
"song_dances": song_dances,
|
|
"song_alts": song_alts,
|
|
"playlists": playlists,
|
|
"deleted_playlists": deleted,
|
|
}
|
|
|
|
# ── Anvend pull ───────────────────────────────────────────────────────────
|
|
|
|
def _apply_pull(self, data: dict):
|
|
"""Gem server-data lokalt — opdaterer dans-info og importerer playlister."""
|
|
conn = sqlite3.connect(self._db_path)
|
|
conn.row_factory = sqlite3.Row
|
|
|
|
# Opdater dans-info fra server
|
|
for d in data.get("dances", []):
|
|
if not d.get("name"):
|
|
continue
|
|
existing = conn.execute(
|
|
"SELECT id FROM dances WHERE name=? COLLATE NOCASE", (d["name"],)
|
|
).fetchone()
|
|
if existing and (d.get("choreographer") or d.get("video_url") or d.get("stepsheet_url")):
|
|
conn.execute("""
|
|
UPDATE dances SET
|
|
choreographer = CASE WHEN choreographer='' THEN ? ELSE choreographer END,
|
|
video_url = CASE WHEN video_url='' THEN ? ELSE video_url END,
|
|
stepsheet_url = CASE WHEN stepsheet_url='' THEN ? ELSE stepsheet_url END
|
|
WHERE id=?
|
|
""", (d.get("choreographer",""), d.get("video_url",""),
|
|
d.get("stepsheet_url",""), existing["id"]))
|
|
|
|
# Importer/opdater egne playlister fra server — server er sandhed
|
|
# Hent navne på lokalt slettede playlister så vi ikke genskaber dem
|
|
try:
|
|
deleted_names = set(
|
|
row["name"] for row in conn.execute(
|
|
"SELECT name FROM deleted_playlists"
|
|
).fetchall()
|
|
)
|
|
except Exception:
|
|
deleted_names = set()
|
|
|
|
for pl in data.get("my_playlists", []):
|
|
server_id = pl.get("server_id")
|
|
name = pl.get("name", "")
|
|
if not server_id or not name:
|
|
continue
|
|
|
|
# Spring over hvis listen er markeret som slettet lokalt
|
|
if name in deleted_names:
|
|
continue
|
|
|
|
existing = conn.execute(
|
|
"SELECT id FROM playlists WHERE api_project_id=?", (server_id,)
|
|
).fetchone()
|
|
|
|
if existing:
|
|
pl_id = existing["id"]
|
|
# Opdater navn hvis det er ændret på serveren
|
|
conn.execute(
|
|
"UPDATE playlists SET name=? WHERE id=?", (name, pl_id)
|
|
)
|
|
else:
|
|
cur = conn.execute(
|
|
"INSERT INTO playlists (name, description, api_project_id, is_linked, server_permission) "
|
|
"VALUES (?,?,?,1,'edit')",
|
|
(name, pl.get("description",""), server_id)
|
|
)
|
|
pl_id = cur.lastrowid
|
|
|
|
# Genindlæs sange fra serveren — server er sandhed
|
|
conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,))
|
|
position = 1
|
|
for song_data in pl.get("songs", []):
|
|
title = song_data.get("title", "")
|
|
artist = song_data.get("artist", "")
|
|
if not title:
|
|
continue
|
|
local = conn.execute(
|
|
"SELECT id FROM songs WHERE title=? AND artist=? LIMIT 1",
|
|
(title, artist)
|
|
).fetchone()
|
|
if not local:
|
|
import uuid
|
|
new_id = str(uuid.uuid4())
|
|
conn.execute(
|
|
"INSERT OR IGNORE INTO songs (id, title, artist, file_missing) VALUES (?,?,?,1)",
|
|
(new_id, title, artist)
|
|
)
|
|
local_id = new_id
|
|
else:
|
|
local_id = local["id"]
|
|
|
|
conn.execute("""
|
|
INSERT OR IGNORE INTO playlist_songs
|
|
(playlist_id, song_id, position, status, is_workshop, dance_override)
|
|
VALUES (?,?,?,?,?,?)
|
|
""", (pl_id, local_id, position,
|
|
song_data.get("status","pending"),
|
|
1 if song_data.get("is_workshop") else 0,
|
|
song_data.get("dance_override","") or ""))
|
|
position += 1
|
|
|
|
# Importer delte playlister (read-only — is_linked=1, server_permission='view')
|
|
for pl in data.get("shared", []):
|
|
server_id = pl.get("server_id")
|
|
name = pl.get("name", "")
|
|
owner = pl.get("owner", "?")
|
|
if not server_id or not name:
|
|
continue
|
|
|
|
existing = conn.execute(
|
|
"SELECT id FROM playlists WHERE api_project_id=?", (server_id,)
|
|
).fetchone()
|
|
|
|
if existing:
|
|
# Opdater sange fra server (ejer kan have ændret listen)
|
|
pl_id = existing["id"]
|
|
conn.execute("DELETE FROM playlist_songs WHERE playlist_id=?", (pl_id,))
|
|
else:
|
|
cur = conn.execute(
|
|
"INSERT INTO playlists (name, description, api_project_id, is_linked, server_permission) "
|
|
"VALUES (?,?,?,1,'view')",
|
|
(f"{name} ({owner})", "", server_id)
|
|
)
|
|
pl_id = cur.lastrowid
|
|
|
|
position = 1
|
|
for song_data in pl.get("songs", []):
|
|
title = song_data.get("title", "")
|
|
artist = song_data.get("artist", "")
|
|
if not title:
|
|
continue
|
|
local = conn.execute(
|
|
"SELECT id FROM songs WHERE title=? AND artist=? LIMIT 1",
|
|
(title, artist)
|
|
).fetchone()
|
|
if not local:
|
|
import uuid
|
|
new_id = str(uuid.uuid4())
|
|
conn.execute(
|
|
"INSERT OR IGNORE INTO songs (id, title, artist, file_missing) VALUES (?,?,?,1)",
|
|
(new_id, title, artist)
|
|
)
|
|
local_id = new_id
|
|
else:
|
|
local_id = local["id"]
|
|
conn.execute("""
|
|
INSERT OR IGNORE INTO playlist_songs
|
|
(playlist_id, song_id, position, status, is_workshop, dance_override)
|
|
VALUES (?,?,?,?,?,?)
|
|
""", (pl_id, local_id, position,
|
|
song_data.get("status","pending"),
|
|
1 if song_data.get("is_workshop") else 0,
|
|
song_data.get("dance_override","") or ""))
|
|
position += 1
|
|
|
|
conn.commit()
|
|
conn.close()
|