From a0a869c5a5e9ac5a7b1e2b21842f69ec6350738f Mon Sep 17 00:00:00 2001 From: Linrador <68631622+Linrador@users.noreply.github.com> Date: Tue, 3 Mar 2026 21:14:39 +0100 Subject: [PATCH] updated --- backend/{generate.go => assets_generate.go} | 39 +- .../{generate_sprite.go => assets_sprite.go} | 2 +- backend/chaturbate_online.go | 171 ++ backend/generated_gc.go | 185 -- backend/http_teaser.go | 202 -- backend/live.go | 674 +++++ backend/main.go | 29 + backend/models_store.go | 4 +- backend/{postwork_queue.go => postwork.go} | 41 +- backend/postwork_refresh.go | 42 - backend/preview.go | 2286 +++++++++++++++++ backend/preview_covers.go | 1177 --------- backend/preview_hls.go | 393 --- backend/preview_m3u8_rewrite.go | 100 - backend/preview_status_svg.go | 86 - backend/preview_teaser.go | 455 ---- backend/preview_webp.go | 728 ------ backend/{record_handlers.go => record.go} | 921 ++----- backend/record_job_progress.go | 105 - ...ecord_helpers_paths.go => record_paths.go} | 177 +- backend/record_preview_scrubber.go | 122 - backend/record_preview_sprite.go | 67 - backend/{record_start.go => recorder.go} | 316 ++- backend/routes.go | 11 +- backend/settings.go | 12 + backend/{cleanup.go => tasks_cleanup.go} | 209 +- frontend/src/App.tsx | 315 +-- .../ui/FinishedDownloadsCardsView.tsx | 111 +- .../ui/FinishedDownloadsGalleryView.tsx | 15 +- .../components/ui/FinishedVideoPreview.tsx | 8 +- frontend/src/components/ui/LiveHlsVideo.tsx | 45 +- frontend/src/components/ui/ModelDetails.tsx | 1153 +++++++-- frontend/src/components/ui/ModelPreview.tsx | 4 +- frontend/src/components/ui/Pagination.tsx | 4 +- ...oller.ts => chaturbateOnlinePoller.ts.bak} | 0 frontend/vite.config.ts | 1 + 36 files changed, 5159 insertions(+), 5051 deletions(-) rename backend/{generate.go => assets_generate.go} (94%) rename backend/{generate_sprite.go => assets_sprite.go} (99%) delete mode 100644 backend/generated_gc.go delete mode 100644 backend/http_teaser.go create mode 100644 backend/live.go 
rename backend/{postwork_queue.go => postwork.go} (87%) delete mode 100644 backend/postwork_refresh.go create mode 100644 backend/preview.go delete mode 100644 backend/preview_covers.go delete mode 100644 backend/preview_hls.go delete mode 100644 backend/preview_m3u8_rewrite.go delete mode 100644 backend/preview_status_svg.go delete mode 100644 backend/preview_teaser.go delete mode 100644 backend/preview_webp.go rename backend/{record_handlers.go => record.go} (67%) delete mode 100644 backend/record_job_progress.go rename backend/{record_helpers_paths.go => record_paths.go} (50%) delete mode 100644 backend/record_preview_scrubber.go delete mode 100644 backend/record_preview_sprite.go rename backend/{record_start.go => recorder.go} (62%) rename backend/{cleanup.go => tasks_cleanup.go} (66%) rename frontend/src/lib/{chaturbateOnlinePoller.ts => chaturbateOnlinePoller.ts.bak} (100%) diff --git a/backend/generate.go b/backend/assets_generate.go similarity index 94% rename from backend/generate.go rename to backend/assets_generate.go index 3ea6cf0..6f8b556 100644 --- a/backend/generate.go +++ b/backend/assets_generate.go @@ -201,10 +201,21 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU return out, nil } - // 🔒 Schutz gegen Race: sehr frische Dateien sind evtl. noch nicht finalisiert/kopiert - // (typisch: moov atom fehlt noch) - if time.Since(fi.ModTime()) < 10*time.Second { - return out, nil + // 🔒 Schutz gegen Race: sehr frische Dateien sind evtl. noch nicht finalisiert/kopiert. + // Statt direkt zu skippen: kurz warten und dann weitermachen (sonst gibt es keinen Retry). 
+ if age := time.Since(fi.ModTime()); age < 10*time.Second { + wait := 10*time.Second - age + // nicht ewig blocken, respektiere ctx + if wait > 0 { + t := time.NewTimer(wait) + defer t.Stop() + select { + case <-t.C: + // weiter + case <-ctx.Done(): + return out, ctx.Err() + } + } } id := assetIDFromVideoPath(videoPath) @@ -295,16 +306,18 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU progress(0.10) - t := 0.0 - if meta.durSec > 0 { - t = meta.durSec * 0.5 - } - - progress(0.15) - - img, e1 := extractFrameAtTimeWebP(videoPath, t) + // ✅ Immer letztes Frame bevorzugen (Preview soll “Endzustand” zeigen) + img, e1 := extractLastFrameWebP(videoPath) if e1 != nil || len(img) == 0 { - img, e1 = extractLastFrameWebP(videoPath) + // Fallback: wenn wir Duration kennen, versuche kurz vor Ende + if meta.durSec > 0 { + t := meta.durSec - 0.25 + if t < 0 { + t = 0 + } + img, e1 = extractFrameAtTimeWebP(videoPath, t) + } + // Letzter Fallback: erstes Frame if e1 != nil || len(img) == 0 { img, e1 = extractFirstFrameWebPScaled(videoPath, 720, 75) } diff --git a/backend/generate_sprite.go b/backend/assets_sprite.go similarity index 99% rename from backend/generate_sprite.go rename to backend/assets_sprite.go index 2586ac2..f065bf8 100644 --- a/backend/generate_sprite.go +++ b/backend/assets_sprite.go @@ -1,4 +1,4 @@ -// backend\generate_sprite.go +// backend\assets_sprite.go package main diff --git a/backend/chaturbate_online.go b/backend/chaturbate_online.go index ab626a1..0b29ac4 100644 --- a/backend/chaturbate_online.go +++ b/backend/chaturbate_online.go @@ -9,6 +9,7 @@ import ( "fmt" "io" "net/http" + "path/filepath" "sort" "strconv" "strings" @@ -91,6 +92,25 @@ var ( cbRefreshInFlight bool ) +// --- HLS refresh throttling (damit /online nicht zu teuer wird) --- +var cbHlsRefreshMu sync.Mutex +var cbHlsRefreshAt = map[string]time.Time{} // key=userLower -> last refresh time + +func shouldRefreshHLS(userLower string, minInterval 
time.Duration) bool { + if userLower == "" { + return false + } + cbHlsRefreshMu.Lock() + defer cbHlsRefreshMu.Unlock() + + last := cbHlsRefreshAt[userLower] + if !last.IsZero() && time.Since(last) < minInterval { + return false + } + cbHlsRefreshAt[userLower] = time.Now() + return true +} + func normalizeList(in []string) []string { seen := map[string]bool{} out := make([]string, 0, len(in)) @@ -514,6 +534,109 @@ func hashKey(parts ...string) string { return hex.EncodeToString(h.Sum(nil)) } +// jobMatchesUser prüft, ob ein laufender Job zu diesem Username gehört. +// (wir matchen über SourceURL und Output-Pfad – robust genug ohne modelNameFromFilename Abhängigkeit) +func jobMatchesUser(j *RecordJob, userLower string) bool { + if j == nil { + return false + } + u := strings.ToLower(strings.TrimSpace(userLower)) + if u == "" { + return false + } + + // 1) SourceURL enthält meist / + if s := strings.ToLower(strings.TrimSpace(j.SourceURL)); s != "" { + if strings.Contains(s, "/"+u) || strings.HasSuffix(s, "/"+u) || strings.HasSuffix(s, u) { + return true + } + } + + // 2) Output-Pfad enthält bei dir häufig den modelKey im Dateinamen/Ordner + if out := strings.ToLower(strings.TrimSpace(j.Output)); out != "" { + base := strings.ToLower(strings.TrimSpace(filepath.Base(out))) + if strings.Contains(base, u) { + return true + } + dir := strings.ToLower(strings.TrimSpace(filepath.Base(filepath.Dir(out)))) + if dir == u { + return true + } + } + + return false +} + +// fetchCurrentBestHLS lädt die Room-Seite, parsed hls_source und wählt die beste Variant-Playlist. 
+func fetchCurrentBestHLS(ctx context.Context, username string, cookie string, userAgent string) (string, error) { + u := strings.TrimSpace(username) + if u == "" { + return "", fmt.Errorf("empty username") + } + + hc := NewHTTPClient(userAgent) + pageURL := "https://chaturbate.com/" + strings.Trim(u, "/") + + body, err := hc.FetchPage(ctx, pageURL, cookie) + if err != nil { + return "", err + } + + master, err := ParseStream(body) // -> hls_source + if err != nil { + return "", err + } + + pl, err := FetchPlaylist(ctx, hc, master, cookie) // -> beste Variant + if err != nil { + return "", err + } + + return strings.TrimSpace(pl.PlaylistURL), nil +} + +// refreshRunningJobsHLS aktualisiert PreviewM3U8 (+Cookie/UA) für passende laufende Jobs. +// Wenn die URL rotiert hat: stopPreview(job) damit ffmpeg neu startet. +func refreshRunningJobsHLS(userLower string, newHls string, cookie string, ua string) { + if strings.TrimSpace(userLower) == "" || strings.TrimSpace(newHls) == "" { + return + } + + changedAny := false + + jobsMu.Lock() + for _, j := range jobs { + if j == nil || j.Status != JobRunning { + continue + } + if !jobMatchesUser(j, userLower) { + continue + } + + old := strings.TrimSpace(j.PreviewM3U8) + + j.PreviewM3U8 = newHls + j.PreviewCookie = cookie + j.PreviewUA = ua + + // Wenn ffmpeg schon läuft und sich Quelle geändert hat -> hart stoppen + if old != "" && old != newHls { + stopPreview(j) + // PreviewState zurücksetzen (damit "private/offline" nicht hängen bleibt) + j.PreviewState = "" + j.PreviewStateAt = "" + j.PreviewStateMsg = "" + } + + changedAny = true + } + jobsMu.Unlock() + + if changedAny { + notifyJobsChanged() + } +} + func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet && r.Method != http.MethodPost { http.Error(w, "Nur GET/POST erlaubt", http.StatusMethodNotAllowed) @@ -522,6 +645,15 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) { enabled := 
getSettings().UseChaturbateAPI + // Optional: Cookie vom Frontend (für Cloudflare/session – best effort) + cookieHeader := strings.TrimSpace(r.Header.Get("X-Chaturbate-Cookie")) + + // UA vom Client (oder fallback) + reqUA := strings.TrimSpace(r.Header.Get("User-Agent")) + if reqUA == "" { + reqUA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64)" + } + // --------------------------- // Request params (GET/POST) // --------------------------- @@ -717,6 +849,45 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) { liteByUser := cb.LiteByUser cbMu.RUnlock() + // --------------------------- + // ✅ HLS URL Refresh für laufende Jobs (best effort) + // Trigger nur, wenn explizite Users angefragt werden (dein Frontend macht das so) + // und nur wenn User gerade online ist. + // --------------------------- + if onlySpecificUsers && liteByUser != nil { + const hlsMinInterval = 12 * time.Second // throttle pro user + + for _, u := range users { + rm, ok := liteByUser[u] + if !ok { + continue // offline -> nichts + } + + // Optional: nur wenn wirklich "public" (reduziert unnötige fetches) + // Wenn du auch in "private" previewen willst, entferne diesen Block. + show := strings.ToLower(strings.TrimSpace(rm.CurrentShow)) + if show == "offline" || show == "" { + continue + } + + // throttle + if !shouldRefreshHLS(u, hlsMinInterval) { + continue + } + + // HLS holen (kurzer Timeout – soll /online nicht blockieren) + ctx, cancel := context.WithTimeout(r.Context(), 8*time.Second) + newHls, err := fetchCurrentBestHLS(ctx, rm.Username, cookieHeader, reqUA) + cancel() + if err != nil || strings.TrimSpace(newHls) == "" { + continue + } + + // Jobs aktualisieren + ggf. 
Preview stoppen + refreshRunningJobsHLS(u, newHls, cookieHeader, reqUA) + } + } + // --------------------------- // Persist "last seen online/offline" für explizit angefragte User // --------------------------- diff --git a/backend/generated_gc.go b/backend/generated_gc.go deleted file mode 100644 index 35ea742..0000000 --- a/backend/generated_gc.go +++ /dev/null @@ -1,185 +0,0 @@ -// backend\generated_gc.go - -package main - -import ( - "fmt" - "io/fs" - "os" - "path/filepath" - "strings" - "sync/atomic" - "time" -) - -var generatedGCRunning int32 - -type generatedGCStats struct { - Checked int - Removed int -} - -// Läuft synchron und liefert Zahlen zurück (für /api/settings/cleanup Response). -func triggerGeneratedGarbageCollectorSync() generatedGCStats { - // nur 1 GC gleichzeitig - if !atomic.CompareAndSwapInt32(&generatedGCRunning, 0, 1) { - fmt.Println("🧹 [gc] skip: already running") - return generatedGCStats{} - } - defer atomic.StoreInt32(&generatedGCRunning, 0) - - stats := runGeneratedGarbageCollector() - return stats -} - -// Läuft 1× nach Serverstart (mit Delay), löscht /generated/* Orphans. -func startGeneratedGarbageCollector() { - go func() { - time.Sleep(3 * time.Second) - triggerGeneratedGarbageCollectorSync() - }() -} - -// Core-Logik ohne Delay (für manuelle Trigger, z.B. nach Cleanup) -// Liefert Stats zurück, damit /api/settings/cleanup die Zahlen anzeigen kann. 
-func runGeneratedGarbageCollector() generatedGCStats { - stats := generatedGCStats{} - - s := getSettings() - - doneAbs, err := resolvePathRelativeToApp(s.DoneDir) - if err != nil { - fmt.Println("🧹 [gc] resolve doneDir failed:", err) - return stats - } - doneAbs = strings.TrimSpace(doneAbs) - if doneAbs == "" { - return stats - } - - // 1) Live-IDs sammeln: alle mp4/ts unter /done (rekursiv), .trash ignorieren - live := make(map[string]struct{}, 4096) - - _ = filepath.WalkDir(doneAbs, func(p string, d fs.DirEntry, err error) error { - if err != nil { - return nil - } - - name := d.Name() - - if d.IsDir() { - if strings.EqualFold(name, ".trash") { - return fs.SkipDir - } - return nil - } - - ext := strings.ToLower(filepath.Ext(name)) - if ext != ".mp4" && ext != ".ts" { - return nil - } - - info, err := d.Info() - if err != nil || info.IsDir() || info.Size() <= 0 { - return nil - } - - base := strings.TrimSuffix(name, ext) - id, err := sanitizeID(stripHotPrefix(base)) - if err != nil || id == "" { - return nil - } - - live[id] = struct{}{} - return nil - }) - - // 2) /generated/meta/ prüfen - metaRoot, err := generatedMetaRoot() - if err == nil { - metaRoot = strings.TrimSpace(metaRoot) - } - if err != nil || metaRoot == "" { - return stats - } - - removedMeta := 0 - checkedMeta := 0 - - if entries, err := os.ReadDir(metaRoot); err == nil { - for _, e := range entries { - if !e.IsDir() { - continue - } - id := strings.TrimSpace(e.Name()) - if id == "" || strings.HasPrefix(id, ".") { - continue - } - - checkedMeta++ - if _, ok := live[id]; ok { - continue - } - - removeGeneratedForID(id) - removedMeta++ - } - } - - fmt.Printf("🧹 [gc] generated/meta checked=%d removed_orphans=%d\n", checkedMeta, removedMeta) - stats.Checked += checkedMeta - stats.Removed += removedMeta - - // 3) Optional: legacy /generated/ - genRoot, err := generatedRoot() - if err == nil { - genRoot = strings.TrimSpace(genRoot) - } - if err != nil || genRoot == "" { - return stats - } - - reserved 
:= map[string]struct{}{ - "meta": {}, - "covers": {}, - "cover": {}, - "temp": {}, - "tmp": {}, - ".trash": {}, - } - - removedLegacy := 0 - checkedLegacy := 0 - - if entries, err := os.ReadDir(genRoot); err == nil { - for _, e := range entries { - if !e.IsDir() { - continue - } - - name := strings.TrimSpace(e.Name()) - if name == "" || strings.HasPrefix(name, ".") { - continue - } - if _, ok := reserved[strings.ToLower(name)]; ok { - continue - } - - checkedLegacy++ - if _, ok := live[name]; ok { - continue - } - - removeGeneratedForID(name) - removedLegacy++ - } - } - - if checkedLegacy > 0 || removedLegacy > 0 { - fmt.Printf("🧹 [gc] generated legacy checked=%d removed_orphans=%d\n", checkedLegacy, removedLegacy) - } - stats.Checked += checkedLegacy - stats.Removed += removedLegacy - - return stats -} diff --git a/backend/http_teaser.go b/backend/http_teaser.go deleted file mode 100644 index 72100d3..0000000 --- a/backend/http_teaser.go +++ /dev/null @@ -1,202 +0,0 @@ -package main - -import ( - "context" - "fmt" - "math/rand" - "net/http" - "os" - "os/exec" - "path/filepath" - "strings" - "time" -) - -func serveTeaserFile(w http.ResponseWriter, r *http.Request, path string) { - f, err := openForReadShareDelete(path) - if err != nil { - http.Error(w, "datei öffnen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError) - return - } - defer f.Close() - - fi, err := f.Stat() - if err != nil || fi.IsDir() || fi.Size() == 0 { - http.Error(w, "datei nicht gefunden", http.StatusNotFound) - return - } - - w.Header().Set("Cache-Control", "public, max-age=31536000") - w.Header().Set("Content-Type", "video/mp4") - http.ServeContent(w, r, filepath.Base(path), fi.ModTime(), f) -} - -// tolerante Input-Flags für kaputte/abgeschnittene H264/TS Streams -var ffmpegInputTol = []string{ - "-fflags", "+discardcorrupt+genpts", - "-err_detect", "ignore_err", - "-max_error_rate", "1.0", -} - -var coverModelStore *ModelStore - -func setCoverModelStore(s *ModelStore) { - 
coverModelStore = s - // random seed (einmalig) - rand.Seed(time.Now().UnixNano()) -} - -func generateTeaserMP4(ctx context.Context, srcPath, outPath string, startSec, durSec float64) error { - if durSec <= 0 { - durSec = 8 - } - if startSec < 0 { - startSec = 0 - } - - // temp schreiben -> rename - tmp := outPath + ".tmp.mp4" - - args := []string{ - "-y", - "-hide_banner", - "-loglevel", "error", - } - args = append(args, ffmpegInputTol...) - args = append(args, - "-ss", fmt.Sprintf("%.3f", startSec), - "-i", srcPath, - "-t", fmt.Sprintf("%.3f", durSec), - - // Video - "-vf", "scale=720:-2", - "-map", "0:v:0", - - // Audio (optional: falls kein Audio vorhanden ist, bricht ffmpeg NICHT ab) - "-map", "0:a:0", - "-c:a", "aac", - "-b:a", "128k", - "-ac", "2", - - "-c:v", "libx264", - "-preset", "veryfast", - "-crf", "28", - "-pix_fmt", "yuv420p", - - // Wenn Audio minimal kürzer/länger ist, sauber beenden - "-shortest", - - "-movflags", "+faststart", - "-f", "mp4", - tmp, - ) - - cmd := exec.CommandContext(ctx, ffmpegPath, args...) 
- if out, err := cmd.CombinedOutput(); err != nil { - _ = os.Remove(tmp) - return fmt.Errorf("ffmpeg teaser failed: %v (%s)", err, strings.TrimSpace(string(out))) - } - - _ = os.Remove(outPath) - return os.Rename(tmp, outPath) -} - -func generatedTeaser(w http.ResponseWriter, r *http.Request) { - id := strings.TrimSpace(r.URL.Query().Get("id")) - if id == "" { - http.Error(w, "id fehlt", http.StatusBadRequest) - return - } - - var err error - id, err = sanitizeID(id) - if err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - return - } - - outPath, err := findFinishedFileByID(id) - if err != nil { - http.Error(w, "preview nicht verfügbar", http.StatusNotFound) - return - } - - if err := ensureGeneratedDirs(); err != nil { - http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError) - return - } - - assetID := stripHotPrefix(id) - if assetID == "" { - assetID = id - } - - assetDir, err := ensureGeneratedDir(assetID) - if err != nil { - http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError) - return - } - - previewPath := filepath.Join(assetDir, "preview.mp4") - - // ✅ NEU: noGenerate=1 -> niemals on-the-fly erzeugen, nur liefern wenn vorhanden - qNoGen := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("noGenerate"))) - noGen := qNoGen == "1" || qNoGen == "true" || qNoGen == "yes" - - // Cache hit (neu) - if fi, err := os.Stat(previewPath); err == nil && !fi.IsDir() && fi.Size() > 0 { - serveTeaserFile(w, r, previewPath) - return - } - - // Legacy: generated/teaser/_teaser.mp4 oder .mp4 - if teaserLegacy, _ := generatedTeaserRoot(); strings.TrimSpace(teaserLegacy) != "" { - cids := []string{assetID, id} - for _, cid := range cids { - candidates := []string{ - filepath.Join(teaserLegacy, cid+"_teaser.mp4"), - filepath.Join(teaserLegacy, cid+".mp4"), - } - for _, c := range candidates { - if fi, err := os.Stat(c); err == nil && !fi.IsDir() && fi.Size() > 0 { - if _, err2 := 
os.Stat(previewPath); os.IsNotExist(err2) { - _ = os.MkdirAll(filepath.Dir(previewPath), 0o755) - _ = os.Rename(c, previewPath) - } - if fi2, err2 := os.Stat(previewPath); err2 == nil && !fi2.IsDir() && fi2.Size() > 0 { - serveTeaserFile(w, r, previewPath) - return - } - serveTeaserFile(w, r, c) - return - } - } - } - } - - // ✅ NEU: wenn noGenerate aktiv und bisher kein Teaser gefunden -> 404 - if noGen { - http.Error(w, "preview nicht verfügbar", http.StatusNotFound) - return - } - - // Neu erzeugen - if err := genSem.Acquire(r.Context()); err != nil { - http.Error(w, "abgebrochen: "+err.Error(), http.StatusRequestTimeout) - return - } - defer genSem.Release() - - genCtx, cancel := context.WithTimeout(r.Context(), 3*time.Minute) - defer cancel() - - if err := generateTeaserClipsMP4(genCtx, outPath, previewPath, 1.0, 18); err != nil { - // Fallback: einzelner kurzer Teaser ab Anfang (trifft seltener kaputte Stellen) - if err2 := generateTeaserMP4(genCtx, outPath, previewPath, 0, 8); err2 != nil { - http.Error(w, "konnte preview nicht erzeugen: "+err.Error()+" (fallback ebenfalls fehlgeschlagen: "+err2.Error()+")", http.StatusInternalServerError) - return - } - } - - serveTeaserFile(w, r, previewPath) -} diff --git a/backend/live.go b/backend/live.go new file mode 100644 index 0000000..f801f93 --- /dev/null +++ b/backend/live.go @@ -0,0 +1,674 @@ +// backend/live.go +package main + +import ( + "bufio" + "bytes" + "context" + "fmt" + "io" + "net/http" + "net/url" + "os" + "os/exec" + "path" + "path/filepath" + "regexp" + "strings" + "time" +) + +// ============================================================ +// HLS Live Preview serving (+ m3u8 rewrite) +// ============================================================ +// +// This file contains everything related to the HLS live preview stream: +// - serving index*.m3u8 + segment files from a job's PreviewDir +// - rewriting m3u8 segment URLs to a configurable base path +// - starting/stopping the ffmpeg HLS preview 
process (per job) +// - hover/play activation checks + preview "touch" + ensure-start logic +// +// It intentionally reuses existing globals/types from your backend (package main): +// - jobs, jobsMu, RecordJob, JobRunning +// - ffmpegPath, previewSem +// - notifyJobsChanged() +// - assetIDForJob(job *RecordJob) string +// - startLiveThumbWebPLoop(ctx, job) +// ============================================================ + +// Allowed files that may be served out of PreviewDir. +var previewFileRe = regexp.MustCompile(`^(index(_hq)?\.m3u8|seg_(low|hq)_\d+\.ts|seg_\d+\.ts|init\.m4s|\w+\.m4s)$`) + +func serveLiveNotReady(w http.ResponseWriter, r *http.Request) { + // ✅ Für HLS-Clients (hls.js) ist 204 beim Manifest "ein Fehler" -> aggressive Retries. + // Deshalb: IMMER 200 + gültige (aber leere) m3u8 zurückgeben. + w.Header().Set("Content-Type", "application/vnd.apple.mpegurl; charset=utf-8") + w.Header().Set("Cache-Control", "no-store") + w.Header().Set("X-Content-Type-Options", "nosniff") + w.Header().Set("Retry-After", "1") + + if r.Method == http.MethodHead { + w.WriteHeader(http.StatusOK) + return + } + + body := "#EXTM3U\n" + + "#EXT-X-VERSION:3\n" + + "#EXT-X-TARGETDURATION:2\n" + + "#EXT-X-MEDIA-SEQUENCE:0\n" + + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(body)) +} + +// stopPreview stops the running ffmpeg HLS preview process for a job and resets state. 
+func stopPreview(job *RecordJob) { + jobsMu.Lock() + cmd := job.previewCmd + cancel := job.previewCancel + job.previewCmd = nil + job.previewCancel = nil + job.LiveThumbStarted = false + job.PreviewDir = "" + jobsMu.Unlock() + + if cancel != nil { + cancel() + } + if cmd != nil && cmd.Process != nil { + _ = cmd.Process.Kill() + } +} + +func recordPreviewLive(w http.ResponseWriter, r *http.Request) { + // identisch zu /api/preview, aber m3u8 rewriting soll auf /api/preview/live zeigen + recordPreviewWithBase(w, r, "/api/preview/live") +} + +// servePreviewHLSFileWithBase serves a single HLS file (index/segment) for a job. +// If it's an m3u8, it is rewritten so that segment URIs point at basePath. +func servePreviewHLSFileWithBase(w http.ResponseWriter, r *http.Request, id, file, basePath string) { + file = strings.TrimSpace(file) + if file == "" || filepath.Base(file) != file || !previewFileRe.MatchString(file) { + http.Error(w, "ungültige file", http.StatusBadRequest) + return + } + + isIndex := file == "index.m3u8" || file == "index_hq.m3u8" + + jobsMu.Lock() + job, ok := jobs[id] + state := "" + if ok && job != nil { + state = strings.TrimSpace(job.PreviewState) + } + jobsMu.Unlock() + + // HEAD: quick existence check + if r.Method == http.MethodHead { + if !ok || job == nil { + w.WriteHeader(http.StatusNotFound) + return + } + if state == "private" { + w.WriteHeader(http.StatusForbidden) + return + } + if state == "offline" { + w.WriteHeader(http.StatusNotFound) + return + } + previewDir := strings.TrimSpace(job.PreviewDir) + if previewDir == "" { + w.WriteHeader(http.StatusNotFound) + return + } + p := filepath.Join(previewDir, file) + if st, err := os.Stat(p); err == nil && !st.IsDir() { + w.Header().Set("Cache-Control", "no-store") + w.WriteHeader(http.StatusOK) + return + } + w.WriteHeader(http.StatusNotFound) + return + } + + // activation: hover or play=1 + active := isHover(r) || strings.TrimSpace(r.URL.Query().Get("play")) == "1" + if !active { + if 
isIndex { + serveLiveNotReady(w, r) + return + } + http.Error(w, "preview not active", http.StatusNotFound) + return + } + + if !ok || job == nil { + if isIndex { + serveLiveNotReady(w, r) + return + } + http.Error(w, "job nicht gefunden", http.StatusNotFound) + return + } + + ensurePreviewStarted(r, job) + touchPreview(job) + + jobsMu.Lock() + state = strings.TrimSpace(job.PreviewState) + jobsMu.Unlock() + + if state == "private" { + http.Error(w, "model private", http.StatusForbidden) + return + } + if state == "offline" { + http.Error(w, "model offline", http.StatusNotFound) + return + } + if state == "error" { + http.Error(w, "preview error", http.StatusServiceUnavailable) + return + } + + previewDir := strings.TrimSpace(job.PreviewDir) + if previewDir == "" { + if isIndex { + serveLiveNotReady(w, r) + return + } + http.Error(w, "preview nicht verfügbar", http.StatusNotFound) + return + } + + p := filepath.Join(previewDir, file) + st, err := os.Stat(p) + if err != nil || st.IsDir() { + if isIndex { + serveLiveNotReady(w, r) + return + } + http.Error(w, "datei nicht gefunden", http.StatusNotFound) + return + } + + ext := strings.ToLower(filepath.Ext(p)) + w.Header().Set("Cache-Control", "no-store") + w.Header().Set("X-Accel-Buffering", "no") + + // m3u8 -> rewrite + if ext == ".m3u8" { + raw, err := os.ReadFile(p) + if err != nil { + http.Error(w, "m3u8 read failed", http.StatusInternalServerError) + return + } + rewritten := rewriteM3U8WithBase(raw, id, basePath) + w.Header().Set("Content-Type", "application/vnd.apple.mpegurl; charset=utf-8") + w.WriteHeader(http.StatusOK) + _, _ = w.Write(rewritten) + return + } + + switch ext { + case ".ts": + w.Header().Set("Content-Type", "video/mp2t") + case ".m4s": + w.Header().Set("Content-Type", "video/iso.segment") + default: + w.Header().Set("Content-Type", "application/octet-stream") + } + + // segments may still be written -> wait until size stabilizes + if ext == ".ts" || ext == ".m4s" { + if !waitForStableFile(p, 
2, 120*time.Millisecond) { + http.Error(w, "segment not ready", http.StatusNotFound) + return + } + } + + f, err := os.Open(p) + if err != nil { + http.Error(w, "open failed", http.StatusNotFound) + return + } + defer f.Close() + + http.ServeContent(w, r, file, st.ModTime(), f) +} + +func waitForStableFile(path string, checks int, interval time.Duration) bool { + var last int64 = -1 + for i := 0; i < checks; i++ { + st, err := os.Stat(path) + if err != nil || st.IsDir() { + return false + } + sz := st.Size() + if last >= 0 && sz == last { + return true + } + last = sz + time.Sleep(interval) + } + return false +} + +func classifyPreviewFFmpegStderr(stderr string) (state string, httpStatus int) { + s := strings.ToLower(stderr) + if strings.Contains(s, "403 forbidden") || strings.Contains(s, "http error 403") || strings.Contains(s, "server returned 403") { + return "private", http.StatusForbidden + } + if strings.Contains(s, "404 not found") || strings.Contains(s, "http error 404") || strings.Contains(s, "server returned 404") { + return "offline", http.StatusNotFound + } + return "", 0 +} + +// startPreviewHLS starts ffmpeg to generate HLS segments in previewDir. +// It also starts your existing live-thumb loop: startLiveThumbWebPLoop(ctx, job). 
+func startPreviewHLS(ctx context.Context, job *RecordJob, m3u8URL, previewDir, httpCookie, userAgent string) error { + if strings.TrimSpace(ffmpegPath) == "" { + return fmt.Errorf("kein ffmpeg gefunden – setze FFMPEG_PATH oder lege ffmpeg(.exe) neben das Backend") + } + if err := os.MkdirAll(previewDir, 0o755); err != nil { + return err + } + + jobsMu.Lock() + job.PreviewState = "" + job.PreviewStateAt = "" + job.PreviewStateMsg = "" + jobsMu.Unlock() + notifyJobsChanged() + + commonIn := []string{"-y"} + if strings.TrimSpace(userAgent) != "" { + commonIn = append(commonIn, "-user_agent", userAgent) + } + if strings.TrimSpace(httpCookie) != "" { + commonIn = append(commonIn, "-headers", fmt.Sprintf("Cookie: %s\r\n", httpCookie)) + } + commonIn = append(commonIn, "-i", m3u8URL) + + hqArgs := append(commonIn, + "-vf", "scale=480:-2", + "-c:v", "libx264", "-preset", "veryfast", "-tune", "zerolatency", + "-pix_fmt", "yuv420p", + "-profile:v", "main", + "-level", "3.1", + "-threads", "4", + "-g", "48", "-keyint_min", "48", "-sc_threshold", "0", + "-map", "0:v:0", + "-map", "0:a:0?", + "-c:a", "aac", "-b:a", "128k", "-ac", "2", + "-f", "hls", + "-hls_time", "2", + "-hls_list_size", "6", + "-hls_allow_cache", "0", + "-hls_flags", "delete_segments+append_list+independent_segments+temp_file", + "-hls_segment_filename", filepath.Join(previewDir, "seg_hq_%05d.ts"), + filepath.Join(previewDir, "index_hq.m3u8"), + ) + + cmd := exec.CommandContext(ctx, ffmpegPath, hqArgs...) 
+ var stderr bytes.Buffer + cmd.Stderr = &stderr + + jobsMu.Lock() + job.previewCmd = cmd + jobsMu.Unlock() + + go func() { + if err := previewSem.Acquire(ctx); err != nil { + jobsMu.Lock() + if job.previewCmd == cmd { + job.previewCmd = nil + } + jobsMu.Unlock() + return + } + defer previewSem.Release() + + if err := cmd.Run(); err != nil && ctx.Err() == nil { + st := strings.TrimSpace(stderr.String()) + state, code := classifyPreviewFFmpegStderr(st) + + jobsMu.Lock() + if state != "" { + job.PreviewState = state + job.PreviewStateAt = time.Now().UTC().Format(time.RFC3339Nano) + job.PreviewStateMsg = fmt.Sprintf("ffmpeg input returned HTTP %d", code) + } else { + job.PreviewState = "error" + job.PreviewStateAt = time.Now().UTC().Format(time.RFC3339Nano) + if len(st) > 280 { + job.PreviewStateMsg = st[:280] + "…" + } else { + job.PreviewStateMsg = st + } + } + jobsMu.Unlock() + notifyJobsChanged() + + fmt.Printf("⚠️ preview hq ffmpeg failed: %v (%s)\n", err, st) + } + + jobsMu.Lock() + if job.previewCmd == cmd { + job.previewCmd = nil + } + jobsMu.Unlock() + }() + + startLiveThumbWebPLoop(ctx, job) + return nil +} + +// rewriteM3U8WithBase rewrites all segment URIs inside an m3u8 to point at basePath. +// +// Example output line: +// +// /api/preview/live?id=&file=seg_hq_00001.ts&play=1 +func rewriteM3U8WithBase(raw []byte, id string, basePath string) []byte { + basePath = strings.TrimSpace(basePath) + if basePath == "" { + basePath = "/api/preview" + } + if !strings.HasPrefix(basePath, "/") { + basePath = "/" + basePath + } + + base := basePath + "?id=" + url.QueryEscape(id) + "&file=" + + var out bytes.Buffer + sc := bufio.NewScanner(bytes.NewReader(raw)) + for sc.Scan() { + line := sc.Text() + trim := strings.TrimSpace(line) + if trim == "" { + out.WriteByte('\n') + continue + } + + // tags: may contain URI="..." 
+ if strings.HasPrefix(trim, "#") { + line = rewriteAttrURIWithBase(line, base, basePath) + out.WriteString(line) + out.WriteByte('\n') + continue + } + + u := trim + + // absolute URLs: keep + if strings.HasPrefix(u, "http://") || strings.HasPrefix(u, "https://") { + out.WriteString(line) + out.WriteByte('\n') + continue + } + + // already points to our endpoint: keep + if strings.Contains(u, basePath) || strings.Contains(u, "/api/preview") { + out.WriteString(line) + out.WriteByte('\n') + continue + } + + name := path.Base(u) + out.WriteString(base + url.QueryEscape(name) + "&play=1") + out.WriteByte('\n') + } + if err := sc.Err(); err != nil { + return raw + } + return out.Bytes() +} + +func rewriteAttrURIWithBase(line, base string, basePath string) string { + const key = `URI="` + i := strings.Index(line, key) + if i < 0 { + return line + } + j := strings.Index(line[i+len(key):], `"`) + if j < 0 { + return line + } + start := i + len(key) + end := start + j + val := line[start:end] + valTrim := strings.TrimSpace(val) + + // keep absolute or already-rewritten URIs + if strings.HasPrefix(valTrim, "http://") || strings.HasPrefix(valTrim, "https://") { + return line + } + if strings.Contains(valTrim, basePath) || strings.Contains(valTrim, "/api/preview") { + return line + } + + name := path.Base(valTrim) + repl := base + url.QueryEscape(name) + "&play=1" + return line[:start] + repl + line[end:] +} + +// isHover decides whether this request should count as "active". +func isHover(r *http.Request) bool { + v := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("hover"))) + return v == "1" || v == "true" || v == "yes" +} + +// touchPreview updates the last-hit timestamp so your cleanup/stop logic can use it. +func touchPreview(job *RecordJob) { + if job == nil { + return + } + jobsMu.Lock() + job.previewLastHit = time.Now() + jobsMu.Unlock() +} + +// ensurePreviewStarted starts the ffmpeg HLS preview if not running yet. 
+func ensurePreviewStarted(r *http.Request, job *RecordJob) { + if job == nil { + return + } + job.previewStartMu.Lock() + defer job.previewStartMu.Unlock() + + jobsMu.Lock() + if job.previewCmd != nil && job.PreviewDir != "" { + job.previewLastHit = time.Now() + jobsMu.Unlock() + return + } + m3u8 := strings.TrimSpace(job.PreviewM3U8) + cookie := strings.TrimSpace(job.PreviewCookie) + ua := strings.TrimSpace(job.PreviewUA) + jobsMu.Unlock() + + if m3u8 == "" { + return + } + + pctx, cancel := context.WithCancel(context.Background()) + assetID := assetIDForJob(job) + pdir := filepath.Join(os.TempDir(), "rec_preview", assetID) + + jobsMu.Lock() + job.PreviewDir = pdir + job.previewCancel = cancel + job.previewLastHit = time.Now() + jobsMu.Unlock() + + _ = startPreviewHLS(pctx, job, m3u8, pdir, cookie, ua) +} + +// ============================================================ +// Live fMP4 (single request, chunked) via ffmpeg -> stdout +// Route: /api/preview/live-fmp4?id=&hover=1 +// ============================================================ + +func recordPreviewLiveFMP4(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet { + http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed) + return + } + + id := strings.TrimSpace(r.URL.Query().Get("id")) + if id == "" { + http.Error(w, "id fehlt", http.StatusBadRequest) + return + } + + // activation: hover or play=1 (wie bei HLS) + active := isHover(r) || strings.TrimSpace(r.URL.Query().Get("play")) == "1" + if !active { + http.Error(w, "preview not active", http.StatusNotFound) + return + } + + jobsMu.Lock() + job, ok := jobs[id] + state := "" + if ok && job != nil { + state = strings.TrimSpace(job.PreviewState) + } + jobsMu.Unlock() + + if !ok || job == nil { + http.Error(w, "job nicht gefunden", http.StatusNotFound) + return + } + + // ensure ffmpeg preview input data exists + // (PreviewM3U8 + Cookie/UA werden beim Job gesetzt) + m3u8 := strings.TrimSpace(job.PreviewM3U8) + if m3u8 == "" { + 
http.Error(w, "preview m3u8 fehlt", http.StatusNotFound) + return + } + + // states + if state == "private" { + http.Error(w, "model private", http.StatusForbidden) + return + } + if state == "offline" { + http.Error(w, "model offline", http.StatusNotFound) + return + } + if state == "error" { + http.Error(w, "preview error", http.StatusServiceUnavailable) + return + } + + // Headers: fMP4 stream + w.Header().Set("Content-Type", `video/mp4`) + w.Header().Set("Cache-Control", "no-store") + w.Header().Set("X-Accel-Buffering", "no") + + // Sehr wichtig: Flushbar? + flusher, okf := w.(http.Flusher) + if !okf { + http.Error(w, "Streaming nicht unterstützt", http.StatusInternalServerError) + return + } + + // Client disconnect => ffmpeg stoppen + ctx := r.Context() + + // Cookie/UA aus Job + cookie := strings.TrimSpace(job.PreviewCookie) + ua := strings.TrimSpace(job.PreviewUA) + if ua == "" { + ua = "Mozilla/5.0" + } + + // ffmpeg args: input = m3u8, output = fragmented mp4 to stdout + // ✅ nur Video (kein Audio), damit MSE codecs stabil sind + args := []string{"-hide_banner", "-loglevel", "error"} + if ua != "" { + args = append(args, "-user_agent", ua) + } + if cookie != "" { + args = append(args, "-headers", fmt.Sprintf("Cookie: %s\r\n", cookie)) + } + + // Input + args = append(args, "-i", m3u8) + + // Video encode (low-latency-ish) + args = append(args, + "-an", + "-vf", "scale=480:-2", + "-c:v", "libx264", + "-preset", "veryfast", + "-tune", "zerolatency", + "-pix_fmt", "yuv420p", + "-profile:v", "main", + "-level", "3.1", + "-g", "48", + "-keyint_min", "48", + "-sc_threshold", "0", + ) + + // Output: fMP4 fragmented to stdout (single HTTP response) + args = append(args, + "-f", "mp4", + "-movflags", "frag_keyframe+empty_moov+default_base_moof", + "-frag_duration", "2000000", // 2s (µs) + "-min_frag_duration", "2000000", + "pipe:1", + ) + + cmd := exec.CommandContext(ctx, ffmpegPath, args...) 
+ + // stdout -> response + stdout, err := cmd.StdoutPipe() + if err != nil { + http.Error(w, "ffmpeg stdout pipe failed", http.StatusInternalServerError) + return + } + + // stderr nur für Debug (optional) + var stderr bytes.Buffer + cmd.Stderr = &stderr + + // Start + if err := cmd.Start(); err != nil { + http.Error(w, "ffmpeg start failed: "+err.Error(), http.StatusInternalServerError) + return + } + + // Wenn Client weg => Prozess killt CommandContext sowieso (ctx cancels), + // aber wir kopieren streaming-mäßig. + buf := make([]byte, 32*1024) + + for { + select { + case <-ctx.Done(): + _ = cmd.Process.Kill() + return + default: + } + + n, rerr := stdout.Read(buf) + if n > 0 { + _, _ = w.Write(buf[:n]) + flusher.Flush() + } + if rerr != nil { + if rerr == io.EOF { + break + } + break + } + } + + // Wait (verhindert Zombies) + _ = cmd.Wait() +} diff --git a/backend/main.go b/backend/main.go index 3aebd2b..7aea3b1 100644 --- a/backend/main.go +++ b/backend/main.go @@ -109,6 +109,35 @@ type ffprobeInfo struct { Streams []ffprobeStreamInfo `json:"streams"` } +func jobMatchesModelKey(j *RecordJob, modelKey string) bool { + if j == nil { + return false + } + mk := strings.ToLower(strings.TrimSpace(modelKey)) + if mk == "" { + return false + } + + // 1) Output-Name (bei dir: _MM_DD_YYYY__HH-MM-SS...) 
+ out := strings.TrimSpace(j.Output) + if out != "" { + stem := strings.TrimSuffix(filepath.Base(out), filepath.Ext(out)) + // modelNameFromFilename() hast du schon irgendwo (wird in modelKeyFromFilenameOrPath benutzt) + guess := strings.ToLower(strings.TrimSpace(modelNameFromFilename(stripHotPrefix(stem)))) + if guess == mk { + return true + } + } + + // 2) Fallback: SourceURL enthält / + src := strings.ToLower(strings.TrimSpace(j.SourceURL)) + if src != "" && strings.Contains(src, "/"+mk) { + return true + } + + return false +} + func parseFFRate(s string) float64 { s = strings.TrimSpace(s) if s == "" || s == "0/0" { diff --git a/backend/models_store.go b/backend/models_store.go index c41d25d..c25bf4d 100644 --- a/backend/models_store.go +++ b/backend/models_store.go @@ -158,8 +158,8 @@ func (s *ModelStore) init() error { return err } - db.SetMaxOpenConns(5) - db.SetMaxIdleConns(5) + db.SetMaxOpenConns(10) + db.SetMaxIdleConns(10) if err := db.Ping(); err != nil { _ = db.Close() diff --git a/backend/postwork_queue.go b/backend/postwork.go similarity index 87% rename from backend/postwork_queue.go rename to backend/postwork.go index d276177..e9b0979 100644 --- a/backend/postwork_queue.go +++ b/backend/postwork.go @@ -1,8 +1,10 @@ -// backend/postwork_queue.go +// backend/postwork.go package main import ( "context" + "reflect" + "strings" "sync" "time" ) @@ -199,4 +201,39 @@ func (pq *PostWorkQueue) StatusForKey(key string) PostWorkKeyStatus { } // global (oder in deinem app struct halten) -var postWorkQ = NewPostWorkQueue(512, 4) // maxParallelFFmpeg = 2 +var postWorkQ = NewPostWorkQueue(512, 4) // maxParallelFFmpeg = 4 + +// --- Status Refresher (ehemals postwork_refresh.go) --- + +func startPostWorkStatusRefresher() { + t := time.NewTicker(1 * time.Second) + go func() { + defer t.Stop() + + for range t.C { + changed := false + + jobsMu.Lock() + for _, job := range jobs { + key := strings.TrimSpace(job.PostWorkKey) + if key == "" { + continue + } + + st := 
postWorkQ.StatusForKey(key) + + // ✅ Kein Typname nötig: job.PostWork ist *, st ist + if job.PostWork == nil || !reflect.DeepEqual(*job.PostWork, st) { + tmp := st + job.PostWork = &tmp + changed = true + } + } + jobsMu.Unlock() + + if changed { + notifyJobsChanged() + } + } + }() +} diff --git a/backend/postwork_refresh.go b/backend/postwork_refresh.go deleted file mode 100644 index 1ad0169..0000000 --- a/backend/postwork_refresh.go +++ /dev/null @@ -1,42 +0,0 @@ -// backend\postwork_refresh.go - -package main - -import ( - "reflect" - "strings" - "time" -) - -func startPostWorkStatusRefresher() { - t := time.NewTicker(1 * time.Second) - go func() { - defer t.Stop() - - for range t.C { - changed := false - - jobsMu.Lock() - for _, job := range jobs { - key := strings.TrimSpace(job.PostWorkKey) - if key == "" { - continue - } - - st := postWorkQ.StatusForKey(key) - - // ✅ Kein Typname nötig: job.PostWork ist *, st ist - if job.PostWork == nil || !reflect.DeepEqual(*job.PostWork, st) { - tmp := st - job.PostWork = &tmp - changed = true - } - } - jobsMu.Unlock() - - if changed { - notifyJobsChanged() - } - } - }() -} diff --git a/backend/preview.go b/backend/preview.go new file mode 100644 index 0000000..cda4a7b --- /dev/null +++ b/backend/preview.go @@ -0,0 +1,2286 @@ +// backend/preview.go +package main + +import ( + "bufio" + "bytes" + "context" + "crypto/sha1" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "html" + "image" + "image/color" + "image/draw" + "image/jpeg" + "image/png" + "io" + "log" + "math" + "math/rand" + "net/http" + "net/url" + "os" + "os/exec" + "path" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + "sync" + "time" + + "golang.org/x/image/font" + "golang.org/x/image/font/basicfont" + "golang.org/x/image/math/fixed" +) + +// NOTE: +// Diese Datei ist ein "Zusammenzug" deiner bisherigen preview_* Dateien. 
+// Sie referenziert weiterhin vorhandene Functions/Globals aus deinem Backend, z.B.: +// - resolvePathRelativeToApp, getSettings, ensureAssetsForVideo, generatedThumbFile +// - atomicWriteFile, ensureGeneratedDirs, ensureGeneratedDir +// - durationSecondsCached, parseFFmpegOutTime, ffmpegInputTol +// - jobs, jobsMu, RecordJob, previewSem, thumbSem, JobRunning, notifyJobsChanged +// - sanitizeID, findFinishedFileByID, stripHotPrefix, assetIDForJob, generatedThumbWebPFile +// Bitte diese Abhängigkeiten NICHT löschen – preview.go nutzt sie. + +// ============================================================ +// Shared wiring +// ============================================================ + +// coverModelStore wird von routes.go gesetzt (du rufst setCoverModelStore(store)). +var coverModelStore *ModelStore + +func setCoverModelStore(s *ModelStore) { coverModelStore = s } + +var errCoverNotSupported = errors.New("cover not supported") + +// ============================================================ +// Covers: generated/covers/. + info.json +// Routes: +// - /api/generated/cover?category=...&refresh=1&model=...&src=... 
+// - /api/generated/coverinfo/list +// ============================================================ + +type coverInfo struct { + Category string `json:"category"` + Model string `json:"model,omitempty"` + Src string `json:"src,omitempty"` + GeneratedAt string `json:"generatedAt"` +} + +func normalizeCoverSrc(s string) string { + s = strings.TrimSpace(s) + if s == "" { + return "" + } + // Windows -> URL-artig + s2 := strings.ReplaceAll(s, "\\", "/") + + // Wenn es schon wie ein Web-Pfad aussieht, so lassen + if strings.HasPrefix(s2, "/generated/") || strings.HasPrefix(s2, "http://") || strings.HasPrefix(s2, "https://") { + return s2 + } + + // Wenn es ein lokaler Pfad ist, versuche den /generated/ Teil zu extrahieren + if i := strings.Index(s2, "/generated/"); i >= 0 { + return s2[i:] + } + return s2 +} + +func coversRoot() (string, error) { + return resolvePathRelativeToApp(filepath.Join("generated", "covers")) +} + +func ensureCoversDir() (string, error) { + root, err := coversRoot() + if err != nil { + return "", err + } + if strings.TrimSpace(root) == "" { + return "", fmt.Errorf("covers root ist leer") + } + if err := os.MkdirAll(root, 0o755); err != nil { + return "", err + } + return root, nil +} + +func coverInfoPathForKey(key string) (string, error) { + root, err := coversRoot() + if err != nil { + return "", err + } + return filepath.Join(root, key+".info.json"), nil +} + +func writeCoverInfoBestEffort(key string, info coverInfo) { + p, err := coverInfoPathForKey(key) + if err != nil { + return + } + + b, err := json.MarshalIndent(info, "", " ") + if err != nil { + return + } + _ = os.MkdirAll(filepath.Dir(p), 0o755) + _ = os.WriteFile(p, b, 0o644) +} + +func readCoverInfoBestEffort(key string) (coverInfo, bool) { + p, err := coverInfoPathForKey(key) + if err != nil { + return coverInfo{}, false + } + b, err := os.ReadFile(p) + if err != nil || len(b) == 0 { + return coverInfo{}, false + } + var ci coverInfo + if json.Unmarshal(b, &ci) != nil { + return 
coverInfo{}, false + } + return ci, true +} + +func drawLabel(img draw.Image, text string) { + text = strings.TrimSpace(text) + if text == "" { + return + } + + face := basicfont.Face7x13 + + // Layout + const margin = 10 + const padX = 10 + const padY = 8 + + b := img.Bounds() + + maxTextW := (b.Dx() - 2*margin) - 2*padX + if maxTextW <= 0 { + return + } + + measure := func(s string) int { + d := &font.Drawer{Face: face} + return d.MeasureString(s).Ceil() + } + + label := text + if w := measure(label); w > maxTextW { + ellipsis := "…" + rs := []rune(text) + if len(rs) == 0 { + return + } + lo, hi := 0, len(rs) + best := "" + for lo <= hi { + mid := (lo + hi) / 2 + cand := string(rs[:mid]) + ellipsis + if measure(cand) <= maxTextW { + best = cand + lo = mid + 1 + } else { + hi = mid - 1 + } + } + if best == "" { + label = ellipsis + } else { + label = best + } + } + + d := &font.Drawer{Face: face} + textW := d.MeasureString(label).Ceil() + textH := face.Metrics().Height.Ceil() + ascent := face.Metrics().Ascent.Ceil() + + x0 := b.Min.X + margin + y1 := b.Max.Y - margin + y0 := y1 - (textH + 2*padY) + x1 := x0 + (textW + 2*padX) + + maxX1 := b.Max.X - margin + if x1 > maxX1 { + shift := x1 - maxX1 + x0 -= shift + x1 -= shift + if x0 < b.Min.X+margin { + x0 = b.Min.X + margin + x1 = maxX1 + } + } + + minY0 := b.Min.Y + margin + if y0 < minY0 { + y0 = minY0 + y1 = y0 + (textH + 2*padY) + if y1 > b.Max.Y-margin { + return + } + } + + rect := image.Rect(x0, y0, x1, y1) + + bg := image.NewUniform(color.RGBA{0, 0, 0, 170}) + draw.Draw(img, rect, bg, image.Point{}, draw.Over) + + border := image.NewUniform(color.RGBA{255, 255, 255, 35}) + draw.Draw(img, image.Rect(rect.Min.X, rect.Min.Y, rect.Max.X, rect.Min.Y+1), border, image.Point{}, draw.Over) + draw.Draw(img, image.Rect(rect.Min.X, rect.Max.Y-1, rect.Max.X, rect.Max.Y), border, image.Point{}, draw.Over) + draw.Draw(img, image.Rect(rect.Min.X, rect.Min.Y, rect.Min.X+1, rect.Max.Y), border, image.Point{}, draw.Over) + 
draw.Draw(img, image.Rect(rect.Max.X-1, rect.Min.Y, rect.Max.X, rect.Max.Y), border, image.Point{}, draw.Over) + + tx := x0 + padX + ty := y0 + padY + ascent + + shadow := &font.Drawer{Dst: img, Src: image.NewUniform(color.RGBA{0, 0, 0, 200}), Face: face, Dot: fixed.P(tx+1, ty+1)} + shadow.DrawString(label) + + fg := &font.Drawer{Dst: img, Src: image.NewUniform(color.RGBA{255, 255, 255, 235}), Face: face, Dot: fixed.P(tx, ty)} + fg.DrawString(label) +} + +func splitTagsLoose(raw string) []string { + raw = strings.TrimSpace(raw) + if raw == "" { + return nil + } + parts := strings.FieldsFunc(raw, func(r rune) bool { + switch r { + case '\n', ',', ';', '|': + return true + } + return false + }) + out := make([]string, 0, len(parts)) + seen := map[string]struct{}{} + for _, p := range parts { + t := strings.TrimSpace(p) + if t == "" { + continue + } + low := strings.ToLower(t) + if _, ok := seen[low]; ok { + continue + } + seen[low] = struct{}{} + out = append(out, t) + } + return out +} + +func hasTag(tagsRaw string, want string) bool { + want = strings.ToLower(strings.TrimSpace(want)) + if want == "" { + return false + } + for _, t := range splitTagsLoose(tagsRaw) { + if strings.ToLower(strings.TrimSpace(t)) == want { + return true + } + } + return false +} + +type coverModel struct { + Key string + Tags string +} + +func listModelsForCovers() ([]coverModel, error) { + if coverModelStore == nil { + return nil, fmt.Errorf("model store not set") + } + + ms := coverModelStore.List() + out := make([]coverModel, 0, len(ms)) + for _, m := range ms { + key := strings.TrimSpace(m.ModelKey) + if key == "" { + continue + } + out = append(out, coverModel{Key: key, Tags: m.Tags}) + } + return out, nil +} + +func pickRandomThumbForCategory(ctx context.Context, category string) (thumbPath string, err error) { + category = strings.TrimSpace(category) + if category == "" { + return "", fmt.Errorf("category empty") + } + + select { + case <-ctx.Done(): + return "", ctx.Err() + 
default: + } + + models, err := listModelsForCovers() + if err != nil { + return "", err + } + + cands := make([]coverModel, 0, 64) + for _, m := range models { + key := strings.TrimSpace(m.Key) + if key == "" { + continue + } + if hasTag(m.Tags, category) { + cands = append(cands, coverModel{Key: key, Tags: m.Tags}) + } + } + if len(cands) == 0 { + return "", fmt.Errorf("no model with tag") + } + + rand.Shuffle(len(cands), func(i, j int) { cands[i], cands[j] = cands[j], cands[i] }) + + s := getSettings() + doneAbs, derr := resolvePathRelativeToApp(s.DoneDir) + if derr != nil || strings.TrimSpace(doneAbs) == "" { + return "", fmt.Errorf("doneDir resolve failed: %v", derr) + } + + type candFile struct { + videoPath string + id string + } + + isVideo := func(name string) bool { + low := strings.ToLower(name) + if strings.Contains(low, ".part") || strings.Contains(low, ".tmp") { + return false + } + ext := strings.ToLower(filepath.Ext(name)) + return ext == ".mp4" || ext == ".ts" + } + + for _, m := range cands { + select { + case <-ctx.Done(): + return "", ctx.Err() + default: + } + + modelKey := strings.TrimSpace(m.Key) + if modelKey == "" { + continue + } + + dirs := []string{filepath.Join(doneAbs, modelKey), filepath.Join(doneAbs, "keep", modelKey)} + files := make([]candFile, 0, 128) + for _, d := range dirs { + ents, err := os.ReadDir(d) + if err != nil { + continue + } + for _, e := range ents { + if e.IsDir() { + continue + } + name := e.Name() + if !isVideo(name) { + continue + } + full := filepath.Join(d, name) + stem := strings.TrimSuffix(name, filepath.Ext(name)) + id := stripHotPrefix(strings.TrimSpace(stem)) + if id == "" { + continue + } + files = append(files, candFile{videoPath: full, id: id}) + } + } + + if len(files) == 0 { + continue + } + + cf := files[rand.Intn(len(files))] + _ = ensureAssetsForVideo(cf.videoPath) + + tp, terr := generatedThumbFile(cf.id) + if terr != nil { + continue + } + if fi, serr := os.Stat(tp); serr == nil && !fi.IsDir() 
&& fi.Size() > 0 { + return tp, nil + } + } + + return "", fmt.Errorf("no downloads/thumbs for category") +} + +var coverKeyRe = regexp.MustCompile(`[^a-z0-9._-]+`) + +func sanitizeCoverKey(category string) (string, error) { + c := strings.ToLower(strings.TrimSpace(category)) + if c == "" { + sum := sha1.Sum([]byte(category)) + c = "tag_" + hex.EncodeToString(sum[:8]) + } + if c == "" { + return "", fmt.Errorf("category fehlt") + } + c = strings.ReplaceAll(c, " ", "_") + c = coverKeyRe.ReplaceAllString(c, "_") + c = strings.Trim(c, "._-") + if c == "" { + return "", fmt.Errorf("category ungültig") + } + if len(c) > 120 { + c = c[:120] + } + return c, nil +} + +func detectImageExt(contentType string, b []byte) (ext string, ct string) { + ct = strings.ToLower(strings.TrimSpace(contentType)) + switch { + case strings.Contains(ct, "image/jpeg") || strings.Contains(ct, "image/jpg"): + return ".jpg", "image/jpeg" + case strings.Contains(ct, "image/png"): + return ".png", "image/png" + case strings.Contains(ct, "image/webp"): + return ".webp", "image/webp" + case strings.Contains(ct, "image/gif"): + return ".gif", "image/gif" + } + if len(b) >= 3 && b[0] == 0xFF && b[1] == 0xD8 && b[2] == 0xFF { + return ".jpg", "image/jpeg" + } + if len(b) >= 8 && bytes.Equal(b[:8], []byte{0x89, 'P', 'N', 'G', 0x0D, 0x0A, 0x1A, 0x0A}) { + return ".png", "image/png" + } + if len(b) >= 12 && string(b[:4]) == "RIFF" && string(b[8:12]) == "WEBP" { + return ".webp", "image/webp" + } + if len(b) >= 6 && (string(b[:6]) == "GIF87a" || string(b[:6]) == "GIF89a") { + return ".gif", "image/gif" + } + return ".jpg", "image/jpeg" +} + +func coverPathForCategory(key string, ext string) (string, error) { + root, err := coversRoot() + if err != nil { + return "", err + } + if strings.TrimSpace(root) == "" { + return "", fmt.Errorf("covers root ist leer") + } + if ext == "" { + ext = ".jpg" + } + return filepath.Join(root, key+ext), nil +} + +func findExistingCoverFile(key string) (string, os.FileInfo, 
bool) { + root, err := coversRoot() + if err != nil || strings.TrimSpace(root) == "" { + return "", nil, false + } + ext := []string{".jpg", ".png", ".webp", ".gif"} + for _, e := range ext { + p := filepath.Join(root, key+e) + if fi, err := os.Stat(p); err == nil && !fi.IsDir() && fi.Size() > 0 { + return p, fi, true + } + } + return "", nil, false +} + +func downloadBytes(ctx context.Context, rawURL string, ua string) ([]byte, string, error) { + rawURL = strings.TrimSpace(rawURL) + if rawURL == "" { + return nil, "", fmt.Errorf("src fehlt") + } + + // local: only /generated/... + if strings.HasPrefix(rawURL, "/") { + clean := path.Clean(rawURL) + if !strings.HasPrefix(clean, "/generated/") { + return nil, "", fmt.Errorf("src ungültig") + } + if strings.Contains(clean, "..") { + return nil, "", fmt.Errorf("src ungültig") + } + + rel := strings.TrimPrefix(clean, "/") + abs, err := resolvePathRelativeToApp(rel) + if err != nil || strings.TrimSpace(abs) == "" { + return nil, "", fmt.Errorf("src ungültig") + } + + f, err := os.Open(abs) + if err != nil { + return nil, "", fmt.Errorf("download failed: %v", err) + } + defer f.Close() + + b, err := io.ReadAll(io.LimitReader(f, 10*1024*1024)) + if err != nil { + return nil, "", fmt.Errorf("download failed: %v", err) + } + if len(b) == 0 { + return nil, "", fmt.Errorf("download empty") + } + + ext := strings.ToLower(filepath.Ext(abs)) + ct := "application/octet-stream" + switch ext { + case ".jpg", ".jpeg": + ct = "image/jpeg" + case ".png": + ct = "image/png" + case ".webp": + ct = "image/webp" + case ".gif": + ct = "image/gif" + } + + return b, ct, nil + } + + u, err := url.Parse(rawURL) + if err != nil || u.Scheme == "" || u.Host == "" { + return nil, "", fmt.Errorf("src ungültig") + } + if u.Scheme != "http" && u.Scheme != "https" { + return nil, "", fmt.Errorf("src schema nicht erlaubt") + } + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, rawURL, nil) + if err != nil { + return nil, "", err + } + if 
strings.TrimSpace(ua) == "" { + ua = "Mozilla/5.0" + } + req.Header.Set("User-Agent", ua) + req.Header.Set("Accept", "image/*,*/*;q=0.8") + + client := &http.Client{Timeout: 12 * time.Second} + resp, err := client.Do(req) + if err != nil { + return nil, "", err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return nil, "", fmt.Errorf("download failed: HTTP %d", resp.StatusCode) + } + + b, err := io.ReadAll(io.LimitReader(resp.Body, 10*1024*1024)) + if err != nil { + return nil, "", err + } + if len(b) == 0 { + return nil, "", fmt.Errorf("download empty") + } + + return b, resp.Header.Get("Content-Type"), nil +} + +var coverBatchMu sync.Mutex + +var ( + coverBatchInflight int + coverBatchStarted time.Time + + coverBatchTotal int + coverBatchForced int + coverBatchMiss int + coverBatchErrors int + coverBatchNoThumb int + coverBatchDecodeErr int +) + +func coverBatchEnter(force bool) { + coverBatchMu.Lock() + defer coverBatchMu.Unlock() + + if coverBatchInflight == 0 { + coverBatchStarted = time.Now() + coverBatchTotal = 0 + coverBatchForced = 0 + coverBatchMiss = 0 + coverBatchErrors = 0 + coverBatchNoThumb = 0 + coverBatchDecodeErr = 0 + log.Printf("[cover] BATCH START") + } + + coverBatchInflight++ + coverBatchTotal++ + if force { + coverBatchForced++ + } else { + coverBatchMiss++ + } +} + +func coverBatchLeave(outcome string, status int) { + coverBatchMu.Lock() + defer coverBatchMu.Unlock() + + if status >= 400 { + coverBatchErrors++ + } + switch outcome { + case "no-thumb": + coverBatchNoThumb++ + case "decode-failed-no-overlay": + coverBatchDecodeErr++ + } + + coverBatchInflight-- + if coverBatchInflight <= 0 { + dur := time.Since(coverBatchStarted).Round(time.Millisecond) + log.Printf( + "[cover] BATCH END total=%d miss=%d forced=%d errors=%d noThumb=%d decodeFail=%d took=%s", + coverBatchTotal, + coverBatchMiss, + coverBatchForced, + coverBatchErrors, + coverBatchNoThumb, + coverBatchDecodeErr, + dur, + ) + coverBatchInflight = 0 + } 
+} + +var reModelFromStem = regexp.MustCompile(`^(.*?)_\d{1,2}_\d{1,2}_\d{4}__\d{1,2}-\d{2}-\d{2}`) + +func inferModelFromStem(stem string) string { + stem = stripHotPrefix(strings.TrimSpace(stem)) + if stem == "" { + return "" + } + m := reModelFromStem.FindStringSubmatch(stem) + if len(m) >= 2 { + return strings.TrimSpace(m[1]) + } + return "" +} + +func inferModelFromThumbLike(srcOrPath string) string { + s := strings.TrimSpace(srcOrPath) + if s == "" { + return "" + } + + s = strings.ReplaceAll(s, `\\`, `/`) + if u, err := url.Parse(s); err == nil && u != nil && u.Scheme != "" && u.Host != "" { + s = u.Path + } + + base := path.Base(s) + lb := strings.ToLower(base) + if strings.HasPrefix(lb, "thumbs.") { + id := path.Base(path.Dir(s)) + return inferModelFromStem(id) + } + + stem := strings.TrimSuffix(base, path.Ext(base)) + return inferModelFromStem(stem) +} + +type coverInfoListItem struct { + Category string `json:"category"` + Model string `json:"model,omitempty"` + GeneratedAt string `json:"generatedAt,omitempty"` + HasCover bool `json:"hasCover"` +} + +func generatedCoverInfoList(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet && r.Method != http.MethodHead { + http.Error(w, "Nur GET/HEAD erlaubt", http.StatusMethodNotAllowed) + return + } + + root, err := coversRoot() + if err != nil { + http.Error(w, "covers root: "+err.Error(), http.StatusInternalServerError) + return + } + + entries, err := os.ReadDir(root) + if err != nil { + http.Error(w, "covers dir: "+err.Error(), http.StatusInternalServerError) + return + } + + byKey := map[string]*coverInfoListItem{} + ensure := func(key string) *coverInfoListItem { + if v, ok := byKey[key]; ok { + return v + } + v := &coverInfoListItem{Category: key} + byKey[key] = v + return v + } + + isCoverExt := func(ext string) bool { + switch strings.ToLower(ext) { + case ".jpg", ".jpeg", ".png", ".webp", ".gif": + return true + default: + return false + } + } + + for _, e := range entries { + 
name := e.Name() + lower := strings.ToLower(name) + + if strings.HasSuffix(lower, ".info.json") { + key := strings.TrimSuffix(name, ".info.json") + if ci, ok := readCoverInfoBestEffort(key); ok { + v := ensure(key) + if strings.TrimSpace(ci.Category) != "" { + v.Category = strings.TrimSpace(ci.Category) + } + if strings.TrimSpace(ci.Model) != "" { + v.Model = strings.TrimSpace(ci.Model) + } + if strings.TrimSpace(ci.GeneratedAt) != "" { + v.GeneratedAt = strings.TrimSpace(ci.GeneratedAt) + } + } + continue + } + + ext := filepath.Ext(name) + if isCoverExt(ext) { + key := strings.TrimSuffix(name, ext) + v := ensure(key) + v.HasCover = true + } + } + + for _, v := range byKey { + if !v.HasCover { + v.Model = "" + v.GeneratedAt = "" + } + if strings.TrimSpace(v.Category) == "" { + v.Category = "" + } + } + + keys := make([]string, 0, len(byKey)) + for k := range byKey { + keys = append(keys, k) + } + sort.Strings(keys) + + out := make([]coverInfoListItem, 0, len(keys)) + for _, k := range keys { + out = append(out, *byKey[k]) + } + + w.Header().Set("Content-Type", "application/json; charset=utf-8") + w.Header().Set("Cache-Control", "no-store") + if r.Method == http.MethodHead { + w.WriteHeader(http.StatusOK) + return + } + _ = json.NewEncoder(w).Encode(out) +} + +func generatedCover(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet && r.Method != http.MethodHead { + http.Error(w, "Nur GET/HEAD erlaubt", http.StatusMethodNotAllowed) + return + } + + category := r.URL.Query().Get("category") + key, err := sanitizeCoverKey(category) + if err != nil { + http.Error(w, "category ungültig: "+err.Error(), http.StatusBadRequest) + return + } + + refresh := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("refresh"))) + force := refresh == "1" || refresh == "true" || refresh == "yes" + + modelQ := strings.TrimSpace(r.URL.Query().Get("model")) + modelExplicit := modelQ != "" + model := modelQ + + src := strings.TrimSpace(r.URL.Query().Get("src")) + + 
fallbackModel := "" + if ci, ok := readCoverInfoBestEffort(key); ok { + if m := strings.TrimSpace(ci.Model); m != "" { + fallbackModel = m + } + } + if model == "" { + model = fallbackModel + } + if !modelExplicit && src != "" { + if m := inferModelFromThumbLike(src); m != "" { + model = m + } + } + + reqID := strconv.FormatInt(time.Now().UnixNano(), 36) + setDebugHeaders := func(cache string) { + w.Header().Set("X-Cover-Key", key) + w.Header().Set("X-Cover-Category", category) + if model != "" { + w.Header().Set("X-Cover-Model", model) + } + w.Header().Set("X-Cover-Cache", cache) + w.Header().Set("X-Request-Id", reqID) + } + + if !force { + if model != "" { + if ci, ok := readCoverInfoBestEffort(key); ok { + if strings.TrimSpace(ci.Model) != model { + force = true + } + } else { + force = true + } + } + + if !force { + if p, fi, ok := findExistingCoverFile(key); ok { + setDebugHeaders("HIT") + + if model != "" { + ci, ok := readCoverInfoBestEffort(key) + if !ok { + ci = coverInfo{Category: category} + } + ci.Category = category + ci.Model = strings.TrimSpace(model) + ci.GeneratedAt = time.Now().UTC().Format(time.RFC3339Nano) + writeCoverInfoBestEffort(key, ci) + } + + w.Header().Set("Cache-Control", "public, max-age=31536000") + w.Header().Set("X-Content-Type-Options", "nosniff") + + ext := strings.ToLower(filepath.Ext(p)) + switch ext { + case ".png": + w.Header().Set("Content-Type", "image/png") + case ".webp": + w.Header().Set("Content-Type", "image/webp") + case ".gif": + w.Header().Set("Content-Type", "image/gif") + default: + w.Header().Set("Content-Type", "image/jpeg") + } + + if r.Method == http.MethodHead { + w.WriteHeader(http.StatusOK) + return + } + + f, err := os.Open(p) + if err != nil { + http.NotFound(w, r) + return + } + defer f.Close() + + http.ServeContent(w, r, filepath.Base(p), fi.ModTime(), f) + return + } + } + } + + cacheStatus := "MISS" + if force { + cacheStatus = "FORCED" + } + setDebugHeaders(cacheStatus) + coverBatchEnter(force) + + 
start := time.Now() + status := http.StatusOK + outcome := "ok" + defer func() { + w.Header().Set("X-Cover-Gen-Ms", strconv.FormatInt(time.Since(start).Milliseconds(), 10)) + coverBatchLeave(outcome, status) + }() + + if _, err := ensureCoversDir(); err != nil { + status = http.StatusInternalServerError + outcome = "covers-dir" + http.Error(w, "covers-dir nicht verfügbar: "+err.Error(), status) + return + } + + ctx, cancel := context.WithTimeout(r.Context(), 20*time.Second) + defer cancel() + + var ( + raw []byte + mimeType string + ext string + ) + + thumbPath := "" + usedSrc := "" + + if src != "" { + var derr error + raw, mimeType, derr = downloadBytes(ctx, src, r.Header.Get("User-Agent")) + usedSrc = normalizeCoverSrc(src) + if derr != nil { + status = http.StatusBadRequest + outcome = "src-download" + http.Error(w, "src download failed: "+derr.Error(), status) + return + } + ext, mimeType = detectImageExt(mimeType, raw) + if len(raw) == 0 { + status = http.StatusBadRequest + outcome = "src-empty" + http.Error(w, "src leer", status) + return + } + if model == "" { + if m := inferModelFromThumbLike(src); m != "" { + model = m + w.Header().Set("X-Cover-Model", model) + } + } + } else { + var perr error + thumbPath, perr = pickRandomThumbForCategory(ctx, category) + if perr != nil { + if p, fi, ok := findExistingCoverFile(key); ok { + outcome = "fallback-existing-cover" + status = http.StatusOK + + w.Header().Set("Cache-Control", "public, max-age=600") + w.Header().Set("X-Content-Type-Options", "nosniff") + + ext2 := strings.ToLower(filepath.Ext(p)) + switch ext2 { + case ".png": + w.Header().Set("Content-Type", "image/png") + case ".webp": + w.Header().Set("Content-Type", "image/webp") + case ".gif": + w.Header().Set("Content-Type", "image/gif") + default: + w.Header().Set("Content-Type", "image/jpeg") + } + + if r.Method == http.MethodHead { + w.WriteHeader(http.StatusOK) + return + } + + f, err := os.Open(p) + if err != nil { + servePreviewStatusSVG(w, "No 
Cover", status) + return + } + defer f.Close() + http.ServeContent(w, r, filepath.Base(p), fi.ModTime(), f) + return + } + + outcome = "no-thumb" + status = http.StatusNotFound + if r.Method == http.MethodHead { + w.WriteHeader(status) + return + } + servePreviewStatusSVG(w, "No Cover", status) + return + } + + usedSrc = normalizeCoverSrc(thumbPath) + raw, err = os.ReadFile(thumbPath) + if err != nil || len(raw) == 0 { + status = http.StatusInternalServerError + outcome = "thumb-read" + http.Error(w, "cover read fehlgeschlagen", status) + return + } + ext = ".jpg" + mimeType = "image/jpeg" + if model == "" { + if m := inferModelFromThumbLike(thumbPath); m != "" { + model = m + w.Header().Set("X-Cover-Model", model) + } + } + } + + if !modelExplicit { + if m := inferModelFromThumbLike(usedSrc); m != "" { + model = m + w.Header().Set("X-Cover-Model", model) + } + } + + img, _, derr := image.Decode(bytes.NewReader(raw)) + if derr == nil && img != nil { + rgba := image.NewRGBA(img.Bounds()) + draw.Draw(rgba, rgba.Bounds(), img, img.Bounds().Min, draw.Src) + if strings.TrimSpace(model) != "" { + drawLabel(rgba, model) + } + + var buf bytes.Buffer + switch strings.ToLower(ext) { + case ".png": + _ = png.Encode(&buf, rgba) + raw = buf.Bytes() + ext = ".png" + mimeType = "image/png" + default: + _ = jpeg.Encode(&buf, rgba, &jpeg.Options{Quality: 85}) + raw = buf.Bytes() + ext = ".jpg" + mimeType = "image/jpeg" + } + } else { + outcome = "decode-failed-no-overlay" + } + + root, _ := coversRoot() + for _, e := range []string{".jpg", ".png", ".webp", ".gif"} { + _ = os.Remove(filepath.Join(root, key+e)) + } + _ = os.Remove(filepath.Join(root, key+".info.json")) + + dst, err := coverPathForCategory(key, ext) + if err != nil { + status = http.StatusInternalServerError + outcome = "cover-path" + http.Error(w, "cover path: "+err.Error(), status) + return + } + if err := atomicWriteFile(dst, raw); err != nil { + status = http.StatusInternalServerError + outcome = "cover-write" + 
http.Error(w, "cover write: "+err.Error(), status) + return + } + + writeCoverInfoBestEffort(key, coverInfo{ + Category: category, + Model: strings.TrimSpace(model), + Src: strings.TrimSpace(usedSrc), + GeneratedAt: time.Now().UTC().Format(time.RFC3339Nano), + }) + + w.Header().Set("Cache-Control", "public, max-age=600") + w.Header().Set("Content-Type", mimeType) + w.Header().Set("X-Content-Type-Options", "nosniff") + w.Header().Set("X-Cover-Bytes", strconv.Itoa(len(raw))) + + if r.Method == http.MethodHead { + w.WriteHeader(http.StatusOK) + return + } + + w.WriteHeader(http.StatusOK) + _, _ = w.Write(raw) +} + +// ============================================================ +// Status SVG (Preview placeholder) +// ============================================================ + +func servePreviewStatusSVG(w http.ResponseWriter, label string, status int) { + w.Header().Set("Content-Type", "image/svg+xml; charset=utf-8") + w.Header().Set("Cache-Control", "no-store") + w.Header().Set("X-Content-Type-Options", "nosniff") + + if status <= 0 { + status = http.StatusOK + } + + title := html.EscapeString(strings.TrimSpace(label)) + if title == "" { + title = "Preview" + } + + svg := ` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ` + title + ` + + Preview nicht verfügbar + +` + + w.WriteHeader(status) + _, _ = w.Write([]byte(svg)) +} + +// ============================================================ +// WebP extraction + preview endpoint +// Route: +// - /api/preview?id= (returns preview.webp / 204 / svg) +// - /api/preview?id=&file=preview.webp +// ============================================================ + +// --- WebP extraction helpers --- + +func extractLastFrameWebP(path string) ([]byte, error) { + cmd := exec.Command( + ffmpegPath, + "-hide_banner", "-loglevel", "error", + "-sseof", "-0.1", + "-i", path, + "-frames:v", "1", + "-vf", "scale=720:-2", + "-quality", "75", + "-f", "image2pipe", + "-vcodec", "libwebp", + "pipe:1", + ) + var out 
bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &stderr + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("ffmpeg last-frame webp: %w (%s)", err, strings.TrimSpace(stderr.String())) + } + b := out.Bytes() + if len(b) == 0 { + return nil, fmt.Errorf("ffmpeg last-frame webp: empty output") + } + return b, nil +} + +func extractFrameAtTimeWebP(path string, seconds float64) ([]byte, error) { + if seconds < 0 { + seconds = 0 + } + seek := fmt.Sprintf("%.3f", seconds) + + cmd := exec.Command( + ffmpegPath, + "-hide_banner", "-loglevel", "error", + "-ss", seek, + "-i", path, + "-frames:v", "1", + "-vf", "scale=720:-2", + "-quality", "75", + "-f", "image2pipe", + "-vcodec", "libwebp", + "pipe:1", + ) + var out bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &stderr + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("ffmpeg frame-at-time webp: %w (%s)", err, strings.TrimSpace(stderr.String())) + } + b := out.Bytes() + if len(b) == 0 { + return nil, fmt.Errorf("ffmpeg frame-at-time webp: empty output") + } + return b, nil +} + +func extractLastFrameWebPScaled(path string, width int, quality int) ([]byte, error) { + if width <= 0 { + width = 320 + } + if quality <= 0 || quality > 100 { + quality = 70 + } + + cmd := exec.Command( + ffmpegPath, + "-hide_banner", "-loglevel", "error", + "-sseof", "-0.25", + "-i", path, + "-frames:v", "1", + "-vf", fmt.Sprintf("scale=%d:-2", width), + "-quality", strconv.Itoa(quality), + "-f", "image2pipe", + "-vcodec", "libwebp", + "pipe:1", + ) + + var out bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &stderr + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("ffmpeg last-frame scaled webp: %w (%s)", err, strings.TrimSpace(stderr.String())) + } + b := out.Bytes() + if len(b) == 0 { + return nil, fmt.Errorf("ffmpeg last-frame scaled webp: empty output") + } + return b, nil +} + +func extractFirstFrameWebPScaled(path string, width int, 
quality int) ([]byte, error) { + if width <= 0 { + width = 320 + } + if quality <= 0 || quality > 100 { + quality = 70 + } + + cmd := exec.Command( + ffmpegPath, + "-hide_banner", "-loglevel", "error", + "-ss", "0", + "-i", path, + "-frames:v", "1", + "-vf", fmt.Sprintf("scale=%d:-2", width), + "-quality", strconv.Itoa(quality), + "-f", "image2pipe", + "-vcodec", "libwebp", + "pipe:1", + ) + + var out bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &out + cmd.Stderr = &stderr + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("ffmpeg first-frame scaled webp: %w (%s)", err, strings.TrimSpace(stderr.String())) + } + b := out.Bytes() + if len(b) == 0 { + return nil, fmt.Errorf("ffmpeg first-frame scaled webp: empty output") + } + return b, nil +} + +func latestPreviewSegment(previewDir string) (string, error) { + entries, err := os.ReadDir(previewDir) + if err != nil { + return "", err + } + var best string + for _, e := range entries { + if e.IsDir() { + continue + } + name := e.Name() + if !strings.HasPrefix(name, "seg_low_") && !strings.HasPrefix(name, "seg_hq_") { + continue + } + if best == "" || name > best { + best = name + } + } + if best == "" { + return "", fmt.Errorf("kein Preview-Segment in %s", previewDir) + } + return filepath.Join(previewDir, best), nil +} + +func extractLastFrameFromPreviewDirThumbWebP(previewDir string) ([]byte, error) { + seg, err := latestPreviewSegment(previewDir) + if err != nil { + return nil, err + } + img, err := extractLastFrameWebPScaled(seg, 320, 70) + if err == nil && len(img) > 0 { + return img, nil + } + return extractFirstFrameWebPScaled(seg, 320, 70) +} + +func extractLastFrameFromPreviewDirWebP(previewDir string) ([]byte, error) { + seg, err := latestPreviewSegment(previewDir) + if err != nil { + return nil, err + } + img, err := extractLastFrameWebP(seg) + if err != nil { + return extractFirstFrameWebPScaled(seg, 720, 75) + } + return img, nil +} + +func serveLivePreviewWebPFile(w http.ResponseWriter, 
r *http.Request, path string) { + f, err := os.Open(path) + if err != nil { + http.NotFound(w, r) + return + } + defer f.Close() + + st, err := f.Stat() + if err != nil || st.IsDir() || st.Size() == 0 { + http.NotFound(w, r) + return + } + + w.Header().Set("Content-Type", "image/webp") + w.Header().Set("Cache-Control", "no-store") + http.ServeContent(w, r, "preview.webp", st.ModTime(), f) +} + +func servePreviewWebPFile(w http.ResponseWriter, r *http.Request, path string) { + f, err := os.Open(path) + if err != nil { + http.NotFound(w, r) + return + } + defer f.Close() + + st, err := f.Stat() + if err != nil || st.IsDir() || st.Size() == 0 { + http.NotFound(w, r) + return + } + + w.Header().Set("Content-Type", "image/webp") + w.Header().Set("Cache-Control", "public, max-age=600") + http.ServeContent(w, r, filepath.Base(path), st.ModTime(), f) +} + +func servePreviewWebPBytes(w http.ResponseWriter, b []byte) { + if len(b) == 0 { + w.WriteHeader(http.StatusNoContent) + return + } + w.Header().Set("Content-Type", "image/webp") + w.Header().Set("Cache-Control", "public, max-age=60") + w.WriteHeader(http.StatusOK) + _, _ = w.Write(b) +} + +func serveLivePreviewWebPBytes(w http.ResponseWriter, b []byte) { + if len(b) == 0 { + w.Header().Set("Cache-Control", "no-store") + w.WriteHeader(http.StatusNoContent) + return + } + w.Header().Set("Content-Type", "image/webp") + w.Header().Set("Cache-Control", "no-store") + w.WriteHeader(http.StatusOK) + _, _ = w.Write(b) +} + +func servePreviewWebPAlias(w http.ResponseWriter, r *http.Request, id string) { + jobsMu.Lock() + job := jobs[id] + jobsMu.Unlock() + + if job != nil { + assetID := assetIDForJob(job) + if assetID != "" { + if webpPath, err := generatedThumbWebPFile(assetID); err == nil { + if st, err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 { + if job.Status == JobRunning { + serveLivePreviewWebPFile(w, r, webpPath) + } else { + servePreviewWebPFile(w, r, webpPath) + } + return + } + } + } + + if 
job.Status == JobRunning { + job.previewMu.Lock() + cached := job.previewWebp + job.previewMu.Unlock() + if len(cached) > 0 { + serveLivePreviewWebPBytes(w, cached) + return + } + } + + servePreviewStatusSVG(w, "Preview", http.StatusOK) + return + } + + assetID := stripHotPrefix(strings.TrimSpace(id)) + if assetID == "" { + http.NotFound(w, r) + return + } + if webpPath, err := generatedThumbWebPFile(assetID); err == nil { + if st, err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 { + servePreviewWebPFile(w, r, webpPath) + return + } + } + http.NotFound(w, r) +} + +func recordPreview(w http.ResponseWriter, r *http.Request) { + // Standard: rewrite soll auf /api/preview zeigen + recordPreviewWithBase(w, r, "/api/preview") +} + +func recordPreviewWithBase(w http.ResponseWriter, r *http.Request, basePath string) { + if r.Method != http.MethodGet && r.Method != http.MethodHead { + http.Error(w, "method not allowed", http.StatusMethodNotAllowed) + return + } + + id := strings.TrimSpace(r.URL.Query().Get("id")) + if id == "" { + id = strings.TrimSpace(r.URL.Query().Get("name")) + } + if id == "" { + http.Error(w, "id fehlt", http.StatusBadRequest) + return + } + + // HLS / file serving + if file := strings.TrimSpace(r.URL.Query().Get("file")); file != "" { + low := strings.ToLower(file) + if low == "preview.webp" { + servePreviewWebPAlias(w, r, id) + return + } + // ✅ Wichtig: HLS rewrite soll auf basePath zeigen (/api/preview oder /api/preview/live) + servePreviewHLSFileWithBase(w, r, id, file, basePath) + return + } + + // WebP preview (running jobs have live thumb behavior) + jobsMu.Lock() + job, ok := jobs[id] + jobsMu.Unlock() + + if ok { + if job.Status == JobRunning { + assetID := assetIDForJob(job) + if assetID != "" { + if webpPath, err := generatedThumbWebPFile(assetID); err == nil { + if st, err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 { + serveLivePreviewWebPFile(w, r, webpPath) + return + } + } + } + } + + 
job.previewMu.Lock() + cached := job.previewWebp + cachedAt := job.previewWebpAt + fresh := len(cached) > 0 && !cachedAt.IsZero() && time.Since(cachedAt) < 8*time.Second + + if !fresh && !job.previewGen { + job.previewGen = true + go func(j *RecordJob) { + defer func() { + j.previewMu.Lock() + j.previewGen = false + j.previewMu.Unlock() + }() + + var img []byte + var genErr error + + previewDir := strings.TrimSpace(j.PreviewDir) + if previewDir != "" { + img, genErr = extractLastFrameFromPreviewDirWebP(previewDir) + } + + if genErr != nil || len(img) == 0 { + outPath := strings.TrimSpace(j.Output) + if outPath != "" { + outPath = filepath.Clean(outPath) + if !filepath.IsAbs(outPath) { + if abs, err := resolvePathRelativeToApp(outPath); err == nil { + outPath = abs + } + } + if fi, err := os.Stat(outPath); err == nil && !fi.IsDir() && fi.Size() > 0 { + img, genErr = extractLastFrameWebP(outPath) + if genErr != nil { + img, _ = extractFirstFrameWebPScaled(outPath, 720, 75) + } + } + } + } + + if len(img) > 0 { + j.previewMu.Lock() + j.previewWebp = img + j.previewWebpAt = time.Now() + j.previewMu.Unlock() + } + }(job) + } + + out := cached + job.previewMu.Unlock() + + if len(out) > 0 { + serveLivePreviewWebPBytes(w, out) + return + } + + jobsMu.Lock() + state := strings.TrimSpace(job.PreviewState) + jobsMu.Unlock() + + if state == "private" { + servePreviewStatusSVG(w, "Private", http.StatusOK) + return + } + if state == "offline" { + servePreviewStatusSVG(w, "Offline", http.StatusOK) + return + } + + w.Header().Set("Cache-Control", "no-store") + w.WriteHeader(http.StatusNoContent) + return + } + + // Finished file preview + servePreviewForFinishedFile(w, r, id) +} + +func updateLiveThumbWebPOnce(ctx context.Context, job *RecordJob) { + jobsMu.Lock() + status := job.Status + previewDir := job.PreviewDir + out := job.Output + jobsMu.Unlock() + + if status != JobRunning { + return + } + + assetID := assetIDForJob(job) + thumbPath, err := generatedThumbWebPFile(assetID) 
+ if err != nil { + return + } + + if st, err := os.Stat(thumbPath); err == nil && st.Size() > 0 { + if time.Since(st.ModTime()) < 10*time.Second { + return + } + } + + if thumbSem != nil { + thumbCtx, cancel := context.WithTimeout(ctx, 3*time.Second) + defer cancel() + if err := thumbSem.Acquire(thumbCtx); err != nil { + return + } + defer thumbSem.Release() + } + + var img []byte + if previewDir != "" { + if b, err := extractLastFrameFromPreviewDirThumbWebP(previewDir); err == nil && len(b) > 0 { + img = b + } + } + if len(img) == 0 && out != "" { + if b, err := extractLastFrameWebPScaled(out, 320, 70); err == nil && len(b) > 0 { + img = b + } + } + if len(img) == 0 { + return + } + _ = atomicWriteFile(thumbPath, img) +} + +func startLiveThumbWebPLoop(ctx context.Context, job *RecordJob) { + jobsMu.Lock() + if job.LiveThumbStarted { + jobsMu.Unlock() + return + } + job.LiveThumbStarted = true + jobsMu.Unlock() + + go func() { + updateLiveThumbWebPOnce(ctx, job) + for { + select { + case <-ctx.Done(): + return + case <-time.After(10 * time.Second): + jobsMu.Lock() + st := job.Status + jobsMu.Unlock() + if st != JobRunning { + return + } + updateLiveThumbWebPOnce(ctx, job) + } + } + }() +} + +func servePreviewForFinishedFile(w http.ResponseWriter, r *http.Request, id string) { + var err error + id, err = sanitizeID(id) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + outPath, err := findFinishedFileByID(id) + if err != nil { + http.Error(w, "preview nicht verfügbar", http.StatusNotFound) + return + } + + if err := ensureGeneratedDirs(); err != nil { + http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError) + return + } + + assetID := stripHotPrefix(id) + if assetID == "" { + assetID = id + } + + assetDir, err := ensureGeneratedDir(assetID) + if err != nil { + http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError) + return + } + + if tStr := 
strings.TrimSpace(r.URL.Query().Get("t")); tStr != "" { + if sec, err := strconv.ParseFloat(tStr, 64); err == nil && sec >= 0 { + secI := int64(sec + 0.5) + if secI < 0 { + secI = 0 + } + framePath := filepath.Join(assetDir, fmt.Sprintf("t_%d.webp", secI)) + if fi, err := os.Stat(framePath); err == nil && !fi.IsDir() && fi.Size() > 0 { + servePreviewWebPFile(w, r, framePath) + return + } + img, err := extractFrameAtTimeWebP(outPath, float64(secI)) + if err == nil && len(img) > 0 { + _ = atomicWriteFile(framePath, img) + servePreviewWebPBytes(w, img) + return + } + } + } + + thumbPath := filepath.Join(assetDir, "preview.webp") + if fi, err := os.Stat(thumbPath); err == nil && !fi.IsDir() && fi.Size() > 0 { + servePreviewWebPFile(w, r, thumbPath) + return + } + + genCtx, cancel := context.WithTimeout(r.Context(), 30*time.Second) + defer cancel() + + // ✅ Immer letztes Frame bevorzugen + img, err := extractLastFrameWebP(outPath) + + if err != nil || len(img) == 0 { + // Fallback: kurz vor Ende, falls Duration verfügbar + if dur, derr := durationSecondsCached(genCtx, outPath); derr == nil && dur > 0 { + t := dur - 0.25 + if t < 0 { + t = 0 + } + img, err = extractFrameAtTimeWebP(outPath, t) + } + + // Letzter Fallback: erstes Frame + if err != nil || len(img) == 0 { + img, err = extractFirstFrameWebPScaled(outPath, 720, 75) + if err != nil || len(img) == 0 { + http.Error(w, "konnte preview nicht erzeugen", http.StatusInternalServerError) + return + } + } + } + + _ = atomicWriteFile(thumbPath, img) + servePreviewWebPBytes(w, img) +} + +// ============================================================ +// Teaser generation (used by /api/generated/teaser) +// ============================================================ + +const minSegmentDuration = 0.75 +const defaultTeaserSegments = 12 + +type TeaserPreviewOptions struct { + Segments int + SegmentDuration float64 + Width int + Preset string + CRF int + Audio bool + AudioBitrate string + UseVsync2 bool +} + +func (o 
TeaserPreviewOptions) stepSizeAndOffset(dur float64) (float64, float64) { + if dur <= 0 { + return 0, 0 + } + + n := o.Segments + if n < 1 { + n = 1 + } + + segDur := o.SegmentDuration + if segDur <= 0 { + segDur = 1 + } + if segDur < minSegmentDuration { + segDur = minSegmentDuration + } + + maxStart := dur - 0.05 - segDur + if maxStart < 0 { + maxStart = 0 + } + if n == 1 { + return 0, maxStart * 0.5 + } + + margin := 0.05 * maxStart + if margin < 0 { + margin = 0 + } + span := maxStart - 2*margin + if span < 0 { + span = maxStart + margin = 0 + } + + step := 0.0 + if n > 1 { + step = span / float64(n-1) + } + return step, margin +} + +func generateTeaserClipsMP4(ctx context.Context, srcPath, outPath string, clipLenSec float64, maxClips int) error { + return generateTeaserClipsMP4WithProgress(ctx, srcPath, outPath, clipLenSec, maxClips, nil) +} + +func generateTeaserClipsMP4WithProgress(ctx context.Context, srcPath, outPath string, clipLenSec float64, maxClips int, onRatio func(r float64)) error { + opts := TeaserPreviewOptions{ + Segments: maxClips, + SegmentDuration: clipLenSec, + Width: 640, + Preset: "veryfast", + CRF: 21, + Audio: true, + AudioBitrate: "128k", + UseVsync2: false, + } + return generateTeaserPreviewMP4WithProgress(ctx, srcPath, outPath, opts, onRatio) +} + +func generateTeaserChunkMP4(ctx context.Context, src, out string, start, dur float64, opts TeaserPreviewOptions) error { + opts.Audio = true + + tmp := strings.TrimSuffix(out, ".mp4") + ".part.mp4" + segDur := dur + if segDur < minSegmentDuration { + segDur = minSegmentDuration + } + + args := []string{"-y", "-hide_banner", "-loglevel", "error"} + args = append(args, ffmpegInputTol...) 
+ args = append(args, + "-ss", fmt.Sprintf("%.3f", start), + "-t", fmt.Sprintf("%.3f", segDur), + "-i", src, + "-map", "0:v:0", + "-c:v", "libx264", + "-pix_fmt", "yuv420p", + "-profile:v", "high", + "-level", "4.2", + "-preset", opts.Preset, + "-crf", strconv.Itoa(opts.CRF), + "-threads", "4", + ) + + if opts.UseVsync2 { + args = append(args, "-vsync", "2") + } + + args = append(args, + "-map", "0:a:0", + "-c:a", "aac", + "-b:a", opts.AudioBitrate, + "-ac", "2", + "-shortest", + ) + + args = append(args, "-movflags", "+faststart", tmp) + + cmd := exec.CommandContext(ctx, ffmpegPath, args...) + var stderr bytes.Buffer + cmd.Stderr = &stderr + if err := cmd.Run(); err != nil { + _ = os.Remove(tmp) + return fmt.Errorf("ffmpeg teaser chunk failed: %v (%s)", err, strings.TrimSpace(stderr.String())) + } + _ = os.Remove(out) + return os.Rename(tmp, out) +} + +func computeTeaserStarts(dur float64, opts TeaserPreviewOptions) (starts []float64, segDur float64, usedSegments int) { + if opts.SegmentDuration <= 0 { + opts.SegmentDuration = 1 + } + if opts.Segments <= 0 { + opts.Segments = defaultTeaserSegments + } + segDur = opts.SegmentDuration + if segDur < minSegmentDuration { + segDur = minSegmentDuration + } + + if dur > 0 && dur < segDur*float64(opts.Segments) { + opts.Segments = 1 + segDur = dur + } + + usedSegments = opts.Segments + if !(dur > 0) { + return []float64{0}, segDur, 1 + } + + stepSize, offset := opts.stepSizeAndOffset(dur) + starts = make([]float64, 0, opts.Segments) + for i := 0; i < opts.Segments; i++ { + t := offset + float64(i)*stepSize + maxStart := math.Max(0, dur-0.05-segDur) + if t < 0 { + t = 0 + } + if t > maxStart { + t = maxStart + } + if t < 0.05 { + t = 0.05 + } + starts = append(starts, t) + } + return starts, segDur, usedSegments +} + +func generateTeaserPreviewMP4WithProgress(ctx context.Context, srcPath, outPath string, opts TeaserPreviewOptions, onRatio func(r float64)) error { + opts.Audio = true + if opts.SegmentDuration <= 0 { + 
opts.SegmentDuration = 1 + } + if opts.Segments <= 0 { + opts.Segments = defaultTeaserSegments + } + if opts.Width <= 0 { + opts.Width = 640 + } + if opts.Preset == "" { + opts.Preset = "veryfast" + } + if opts.CRF <= 0 { + opts.CRF = 21 + } + if opts.AudioBitrate == "" { + opts.AudioBitrate = "128k" + } + segDur := opts.SegmentDuration + if segDur < minSegmentDuration { + segDur = minSegmentDuration + } + + dur, _ := durationSecondsCached(ctx, srcPath) + if dur > 0 && dur < segDur*float64(opts.Segments) { + opts.Segments = 1 + segDur = dur + } + + if !(dur > 0) { + if onRatio != nil { + onRatio(0) + } + err := generateTeaserChunkMP4(ctx, srcPath, outPath, 0, math.Min(8, segDur), opts) + if onRatio != nil { + onRatio(1) + } + return err + } + + starts, segDurComputed, _ := computeTeaserStarts(dur, opts) + segDur = segDurComputed + + expectedOutSec := float64(len(starts)) * segDur + tmp := strings.TrimSuffix(outPath, ".mp4") + ".part.mp4" + + args := []string{"-y", "-nostats", "-progress", "pipe:1", "-hide_banner", "-loglevel", "error"} + for _, t := range starts { + args = append(args, ffmpegInputTol...) 
+ args = append(args, "-ss", fmt.Sprintf("%.3f", t), "-t", fmt.Sprintf("%.3f", segDur), "-i", srcPath) + } + + var fc strings.Builder + for i := range starts { + fmt.Fprintf(&fc, "[%d:v]scale=%d:-2,setsar=1,setpts=PTS-STARTPTS[v%d];", i, opts.Width, i) + fmt.Fprintf(&fc, "[%d:a]aresample=48000,aformat=channel_layouts=stereo,asetpts=PTS-STARTPTS[a%d];", i, i) + } + for i := range starts { + fmt.Fprintf(&fc, "[v%d][a%d]", i, i) + } + fmt.Fprintf(&fc, "concat=n=%d:v=1:a=1[v][a]", len(starts)) + + args = append(args, "-filter_complex", fc.String()) + args = append(args, "-map", "[v]", "-map", "[a]") + args = append(args, + "-c:v", "libx264", + "-pix_fmt", "yuv420p", + "-profile:v", "high", + "-level", "4.2", + "-preset", opts.Preset, + "-crf", strconv.Itoa(opts.CRF), + "-threads", "4", + ) + if opts.UseVsync2 { + args = append(args, "-vsync", "2") + } + args = append(args, + "-c:a", "aac", + "-b:a", opts.AudioBitrate, + "-ac", "2", + "-shortest", + ) + args = append(args, "-movflags", "+faststart", tmp) + + cmd := exec.CommandContext(ctx, ffmpegPath, args...) 
+ stdout, err := cmd.StdoutPipe() + if err != nil { + return err + } + var stderr bytes.Buffer + cmd.Stderr = &stderr + if err := cmd.Start(); err != nil { + return err + } + + sc := bufio.NewScanner(stdout) + sc.Buffer(make([]byte, 0, 64*1024), 1024*1024) + + var outSec float64 + var lastSent float64 + var lastAt time.Time + + send := func(outSec float64, force bool) { + if onRatio == nil { + return + } + if expectedOutSec > 0 && outSec > 0 { + r := outSec / expectedOutSec + if r < 0 { + r = 0 + } + if r > 1 { + r = 1 + } + if r-lastSent < 0.01 && !force { + return + } + if !lastAt.IsZero() && time.Since(lastAt) < 150*time.Millisecond && !force { + return + } + lastSent = r + lastAt = time.Now() + onRatio(r) + return + } + if force { + onRatio(1) + } + } + + for sc.Scan() { + line := strings.TrimSpace(sc.Text()) + if line == "" { + continue + } + parts := strings.SplitN(line, "=", 2) + if len(parts) != 2 { + continue + } + k, v := parts[0], parts[1] + switch k { + case "out_time_ms": + if n, perr := strconv.ParseInt(strings.TrimSpace(v), 10, 64); perr == nil && n > 0 { + outSec = float64(n) / 1_000_000.0 + send(outSec, false) + } + case "out_time": + if s := parseFFmpegOutTime(v); s > 0 { + outSec = s + send(outSec, false) + } + case "progress": + if strings.TrimSpace(v) == "end" { + send(outSec, true) + } + } + } + + if err := cmd.Wait(); err != nil { + _ = os.Remove(tmp) + return fmt.Errorf("ffmpeg teaser preview failed: %v (%s)", err, strings.TrimSpace(stderr.String())) + } + + _ = os.Remove(outPath) + return os.Rename(tmp, outPath) +} + +func serveTeaserFile(w http.ResponseWriter, r *http.Request, path string) { + f, err := openForReadShareDelete(path) + if err != nil { + http.Error(w, "datei öffnen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError) + return + } + defer f.Close() + + fi, err := f.Stat() + if err != nil || fi.IsDir() || fi.Size() == 0 { + http.Error(w, "datei nicht gefunden", http.StatusNotFound) + return + } + + 
w.Header().Set("Cache-Control", "public, max-age=31536000") + w.Header().Set("Content-Type", "video/mp4") + http.ServeContent(w, r, filepath.Base(path), fi.ModTime(), f) +} + +// tolerante Input-Flags für kaputte/abgeschnittene H264/TS Streams +var ffmpegInputTol = []string{ + "-fflags", "+discardcorrupt+genpts", + "-err_detect", "ignore_err", + "-max_error_rate", "1.0", +} + +func generateTeaserMP4(ctx context.Context, srcPath, outPath string, startSec, durSec float64) error { + if durSec <= 0 { + durSec = 8 + } + if startSec < 0 { + startSec = 0 + } + + // temp schreiben -> rename + tmp := outPath + ".tmp.mp4" + + args := []string{ + "-y", + "-hide_banner", + "-loglevel", "error", + } + args = append(args, ffmpegInputTol...) + args = append(args, + "-ss", fmt.Sprintf("%.3f", startSec), + "-i", srcPath, + "-t", fmt.Sprintf("%.3f", durSec), + + // Video + "-vf", "scale=720:-2", + "-map", "0:v:0", + + // Audio (optional: falls kein Audio vorhanden ist, bricht ffmpeg NICHT ab) + "-map", "0:a:0", + "-c:a", "aac", + "-b:a", "128k", + "-ac", "2", + + "-c:v", "libx264", + "-preset", "veryfast", + "-crf", "28", + "-pix_fmt", "yuv420p", + + // Wenn Audio minimal kürzer/länger ist, sauber beenden + "-shortest", + + "-movflags", "+faststart", + "-f", "mp4", + tmp, + ) + + cmd := exec.CommandContext(ctx, ffmpegPath, args...) 
+ if out, err := cmd.CombinedOutput(); err != nil { + _ = os.Remove(tmp) + return fmt.Errorf("ffmpeg teaser failed: %v (%s)", err, strings.TrimSpace(string(out))) + } + + _ = os.Remove(outPath) + return os.Rename(tmp, outPath) +} + +func generatedTeaser(w http.ResponseWriter, r *http.Request) { + id := strings.TrimSpace(r.URL.Query().Get("id")) + if id == "" { + http.Error(w, "id fehlt", http.StatusBadRequest) + return + } + + var err error + id, err = sanitizeID(id) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + outPath, err := findFinishedFileByID(id) + if err != nil { + http.Error(w, "preview nicht verfügbar", http.StatusNotFound) + return + } + + if err := ensureGeneratedDirs(); err != nil { + http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError) + return + } + + assetID := stripHotPrefix(id) + if assetID == "" { + assetID = id + } + + assetDir, err := ensureGeneratedDir(assetID) + if err != nil { + http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError) + return + } + + previewPath := filepath.Join(assetDir, "preview.mp4") + + // ✅ NEU: noGenerate=1 -> niemals on-the-fly erzeugen, nur liefern wenn vorhanden + qNoGen := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("noGenerate"))) + noGen := qNoGen == "1" || qNoGen == "true" || qNoGen == "yes" + + // Cache hit (neu) + if fi, err := os.Stat(previewPath); err == nil && !fi.IsDir() && fi.Size() > 0 { + serveTeaserFile(w, r, previewPath) + return + } + + // Legacy: generated/teaser/_teaser.mp4 oder .mp4 + if teaserLegacy, _ := generatedTeaserRoot(); strings.TrimSpace(teaserLegacy) != "" { + cids := []string{assetID, id} + for _, cid := range cids { + candidates := []string{ + filepath.Join(teaserLegacy, cid+"_teaser.mp4"), + filepath.Join(teaserLegacy, cid+".mp4"), + } + for _, c := range candidates { + if fi, err := os.Stat(c); err == nil && !fi.IsDir() && fi.Size() > 0 { + if _, err2 := 
os.Stat(previewPath); os.IsNotExist(err2) { + _ = os.MkdirAll(filepath.Dir(previewPath), 0o755) + _ = os.Rename(c, previewPath) + } + if fi2, err2 := os.Stat(previewPath); err2 == nil && !fi2.IsDir() && fi2.Size() > 0 { + serveTeaserFile(w, r, previewPath) + return + } + serveTeaserFile(w, r, c) + return + } + } + } + } + + // ✅ NEU: wenn noGenerate aktiv und bisher kein Teaser gefunden -> 404 + if noGen { + http.Error(w, "preview nicht verfügbar", http.StatusNotFound) + return + } + + // Neu erzeugen + if err := genSem.Acquire(r.Context()); err != nil { + http.Error(w, "abgebrochen: "+err.Error(), http.StatusRequestTimeout) + return + } + defer genSem.Release() + + genCtx, cancel := context.WithTimeout(r.Context(), 3*time.Minute) + defer cancel() + + if err := generateTeaserClipsMP4(genCtx, outPath, previewPath, 1.0, 18); err != nil { + // Fallback: einzelner kurzer Teaser ab Anfang (trifft seltener kaputte Stellen) + if err2 := generateTeaserMP4(genCtx, outPath, previewPath, 0, 8); err2 != nil { + http.Error(w, "konnte preview nicht erzeugen: "+err.Error()+" (fallback ebenfalls fehlgeschlagen: "+err2.Error()+")", http.StatusInternalServerError) + return + } + } + + serveTeaserFile(w, r, previewPath) +} diff --git a/backend/preview_covers.go b/backend/preview_covers.go deleted file mode 100644 index 7257a57..0000000 --- a/backend/preview_covers.go +++ /dev/null @@ -1,1177 +0,0 @@ -package main - -import ( - "bytes" - "context" - "crypto/sha1" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "image" - "image/color" - "image/draw" - "image/jpeg" - "image/png" - "io" - "log" - "math/rand" - "net/http" - "net/url" - "os" - "path" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - "sync" - "time" - - "golang.org/x/image/font" - "golang.org/x/image/font/basicfont" - "golang.org/x/image/math/fixed" -) - -// -------------------------- -// Covers: generated/covers/. 
-// -------------------------- - -type coverInfo struct { - Category string `json:"category"` - Model string `json:"model,omitempty"` - Src string `json:"src,omitempty"` - GeneratedAt string `json:"generatedAt"` -} - -func normalizeCoverSrc(s string) string { - s = strings.TrimSpace(s) - if s == "" { - return "" - } - // Windows -> URL-artig - s2 := strings.ReplaceAll(s, "\\", "/") - - // Wenn es schon wie ein Web-Pfad aussieht, so lassen - if strings.HasPrefix(s2, "/generated/") || strings.HasPrefix(s2, "http://") || strings.HasPrefix(s2, "https://") { - return s2 - } - - // Wenn es ein lokaler Pfad ist, versuche den /generated/ Teil zu extrahieren - if i := strings.Index(s2, "/generated/"); i >= 0 { - return s2[i:] - } - return s2 -} - -func coverInfoPathForKey(key string) (string, error) { - root, err := coversRoot() - if err != nil { - return "", err - } - return filepath.Join(root, key+".info.json"), nil -} - -func writeCoverInfoBestEffort(key string, info coverInfo) { - p, err := coverInfoPathForKey(key) - if err != nil { - return - } - - b, err := json.MarshalIndent(info, "", " ") - if err != nil { - return - } - _ = os.MkdirAll(filepath.Dir(p), 0o755) - _ = os.WriteFile(p, b, 0o644) -} - -func readCoverInfoBestEffort(key string) (coverInfo, bool) { - p, err := coverInfoPathForKey(key) - if err != nil { - return coverInfo{}, false - } - b, err := os.ReadFile(p) - if err != nil || len(b) == 0 { - return coverInfo{}, false - } - var ci coverInfo - if json.Unmarshal(b, &ci) != nil { - return coverInfo{}, false - } - return ci, true -} - -func drawLabel(img draw.Image, text string) { - text = strings.TrimSpace(text) - if text == "" { - return - } - - face := basicfont.Face7x13 - - // Layout - const margin = 10 - const padX = 10 - const padY = 8 - - b := img.Bounds() - - // Max. verfügbare Breite für Text (ohne Padding/Margins) - maxTextW := (b.Dx() - 2*margin) - 2*padX - if maxTextW <= 0 { - return - } - - // Text ggf. 
kürzen, damit er ins Badge passt - measure := func(s string) int { - d := &font.Drawer{Face: face} - return d.MeasureString(s).Ceil() - } - - label := text - if w := measure(label); w > maxTextW { - ellipsis := "…" - rs := []rune(text) - - // harte Schranke gegen Extremfälle - if len(rs) == 0 { - return - } - - lo, hi := 0, len(rs) - best := "" - for lo <= hi { - mid := (lo + hi) / 2 - cand := string(rs[:mid]) + ellipsis - if measure(cand) <= maxTextW { - best = cand - lo = mid + 1 - } else { - hi = mid - 1 - } - } - - if best == "" { - // notfalls nur Ellipsis - label = ellipsis - } else { - label = best - } - } - - // Textmetriken - d := &font.Drawer{Face: face} - textW := d.MeasureString(label).Ceil() - textH := face.Metrics().Height.Ceil() - ascent := face.Metrics().Ascent.Ceil() - - // Badge-Box (unten links) - x0 := b.Min.X + margin - y1 := b.Max.Y - margin - y0 := y1 - (textH + 2*padY) - x1 := x0 + (textW + 2*padX) - - // Clamp nach rechts (falls Bild sehr schmal) - maxX1 := b.Max.X - margin - if x1 > maxX1 { - shift := x1 - maxX1 - x0 -= shift - x1 -= shift - if x0 < b.Min.X+margin { - x0 = b.Min.X + margin - x1 = maxX1 - } - } - - // Clamp nach oben (falls Bild sehr niedrig) - minY0 := b.Min.Y + margin - if y0 < minY0 { - y0 = minY0 - y1 = y0 + (textH + 2*padY) - if y1 > b.Max.Y-margin { - // zu wenig Platz insgesamt - return - } - } - - rect := image.Rect(x0, y0, x1, y1) - - // Background - bg := image.NewUniform(color.RGBA{0, 0, 0, 170}) - draw.Draw(img, rect, bg, image.Point{}, draw.Over) - - // Optional: dünner Rand für mehr Kontrast - border := image.NewUniform(color.RGBA{255, 255, 255, 35}) - // top - draw.Draw(img, image.Rect(rect.Min.X, rect.Min.Y, rect.Max.X, rect.Min.Y+1), border, image.Point{}, draw.Over) - // bottom - draw.Draw(img, image.Rect(rect.Min.X, rect.Max.Y-1, rect.Max.X, rect.Max.Y), border, image.Point{}, draw.Over) - // left - draw.Draw(img, image.Rect(rect.Min.X, rect.Min.Y, rect.Min.X+1, rect.Max.Y), border, image.Point{}, 
draw.Over) - // right - draw.Draw(img, image.Rect(rect.Max.X-1, rect.Min.Y, rect.Max.X, rect.Max.Y), border, image.Point{}, draw.Over) - - // Text baseline - tx := x0 + padX - ty := y0 + padY + ascent - - // Mini-Schatten (Lesbarkeit) - shadow := &font.Drawer{ - Dst: img, - Src: image.NewUniform(color.RGBA{0, 0, 0, 200}), - Face: face, - Dot: fixed.P(tx+1, ty+1), - } - shadow.DrawString(label) - - // Text - fg := &font.Drawer{ - Dst: img, - Src: image.NewUniform(color.RGBA{255, 255, 255, 235}), - Face: face, - Dot: fixed.P(tx, ty), - } - fg.DrawString(label) -} - -func splitTagsLoose(raw string) []string { - raw = strings.TrimSpace(raw) - if raw == "" { - return nil - } - parts := strings.FieldsFunc(raw, func(r rune) bool { - switch r { - case '\n', ',', ';', '|': - return true - } - return false - }) - out := make([]string, 0, len(parts)) - seen := map[string]struct{}{} - for _, p := range parts { - t := strings.TrimSpace(p) - if t == "" { - continue - } - low := strings.ToLower(t) - if _, ok := seen[low]; ok { - continue - } - seen[low] = struct{}{} - out = append(out, t) - } - return out -} - -func hasTag(tagsRaw string, want string) bool { - want = strings.ToLower(strings.TrimSpace(want)) - if want == "" { - return false - } - for _, t := range splitTagsLoose(tagsRaw) { - if strings.ToLower(strings.TrimSpace(t)) == want { - return true - } - } - return false -} - -// ✅ Passe diese Struct/Methoden an dein echtes ModelStore-API an. -type coverModel struct { - Key string // z.B. 
model key/name - Tags string // raw tags (csv/newline/…) -} - -func listModelsForCovers() ([]coverModel, error) { - if coverModelStore == nil { - return nil, fmt.Errorf("model store not set") - } - - ms := coverModelStore.List() // ✅ existiert bei dir - - out := make([]coverModel, 0, len(ms)) - for _, m := range ms { - key := strings.TrimSpace(m.ModelKey) - if key == "" { - continue - } - out = append(out, coverModel{ - Key: key, - Tags: m.Tags, - }) - } - return out, nil -} - -func pickRandomThumbForCategory(ctx context.Context, category string) (thumbPath string, err error) { - category = strings.TrimSpace(category) - if category == "" { - return "", fmt.Errorf("category empty") - } - - select { - case <-ctx.Done(): - return "", ctx.Err() - default: - } - - models, err := listModelsForCovers() - if err != nil { - return "", err - } - - // 1) Kandidaten-Models nach Tag filtern - cands := make([]coverModel, 0, 64) - for _, m := range models { - key := strings.TrimSpace(m.Key) - if key == "" { - continue - } - if hasTag(m.Tags, category) { - cands = append(cands, coverModel{Key: key, Tags: m.Tags}) - } - } - if len(cands) == 0 { - return "", fmt.Errorf("no model with tag") - } - - // 2) Kandidaten mischen und nacheinander probieren - rand.Shuffle(len(cands), func(i, j int) { cands[i], cands[j] = cands[j], cands[i] }) - - // 3) done dirs (einmalig auflösen) - s := getSettings() - doneAbs, derr := resolvePathRelativeToApp(s.DoneDir) - if derr != nil || strings.TrimSpace(doneAbs) == "" { - return "", fmt.Errorf("doneDir resolve failed: %v", derr) - } - - type candFile struct { - videoPath string - id string - } - - isVideo := func(name string) bool { - low := strings.ToLower(name) - if strings.Contains(low, ".part") || strings.Contains(low, ".tmp") { - return false - } - ext := strings.ToLower(filepath.Ext(name)) - return ext == ".mp4" || ext == ".ts" - } - - // 4) Für jedes passende Model: Dateien sammeln, random wählen, Thumb prüfen - for _, m := range cands { - 
select { - case <-ctx.Done(): - return "", ctx.Err() - default: - } - - modelKey := strings.TrimSpace(m.Key) - if modelKey == "" { - continue - } - - // Kandidaten: done// und done/keep// - dirs := []string{ - filepath.Join(doneAbs, modelKey), - filepath.Join(doneAbs, "keep", modelKey), - } - - files := make([]candFile, 0, 128) - - for _, d := range dirs { - ents, err := os.ReadDir(d) - if err != nil { - continue - } - for _, e := range ents { - if e.IsDir() { - continue - } - name := e.Name() - if !isVideo(name) { - continue - } - full := filepath.Join(d, name) - stem := strings.TrimSuffix(name, filepath.Ext(name)) - id := stripHotPrefix(strings.TrimSpace(stem)) - if id == "" { - continue - } - files = append(files, candFile{videoPath: full, id: id}) - } - } - - if len(files) == 0 { - continue - } - - cf := files[rand.Intn(len(files))] - - // thumbs sicherstellen (best effort) - _ = ensureAssetsForVideo(cf.videoPath) - - tp, terr := generatedThumbFile(cf.id) - if terr != nil { - continue - } - if fi, serr := os.Stat(tp); serr == nil && !fi.IsDir() && fi.Size() > 0 { - return tp, nil - } - } - - return "", fmt.Errorf("no downloads/thumbs for category") -} - -func coversRoot() (string, error) { - return resolvePathRelativeToApp(filepath.Join("generated", "covers")) -} - -func ensureCoversDir() (string, error) { - root, err := coversRoot() - if err != nil { - return "", err - } - if strings.TrimSpace(root) == "" { - return "", fmt.Errorf("covers root ist leer") - } - if err := os.MkdirAll(root, 0o755); err != nil { - return "", err - } - return root, nil -} - -var coverKeyRe = regexp.MustCompile(`[^a-z0-9._-]+`) - -func sanitizeCoverKey(category string) (string, error) { - c := strings.ToLower(strings.TrimSpace(category)) - - if c == "" { - sum := sha1.Sum([]byte(category)) - c = "tag_" + hex.EncodeToString(sum[:8]) // 16 hex chars reichen - } - - if c == "" { - return "", fmt.Errorf("category fehlt") - } - c = strings.ReplaceAll(c, " ", "_") - c = 
coverKeyRe.ReplaceAllString(c, "_") - c = strings.Trim(c, "._-") - if c == "" { - return "", fmt.Errorf("category ungültig") - } - if len(c) > 120 { - c = c[:120] - } - return c, nil -} - -func detectImageExt(contentType string, b []byte) (ext string, ct string) { - ct = strings.ToLower(strings.TrimSpace(contentType)) - - switch { - case strings.Contains(ct, "image/jpeg") || strings.Contains(ct, "image/jpg"): - return ".jpg", "image/jpeg" - case strings.Contains(ct, "image/png"): - return ".png", "image/png" - case strings.Contains(ct, "image/webp"): - return ".webp", "image/webp" - case strings.Contains(ct, "image/gif"): - return ".gif", "image/gif" - } - - // Magic bytes fallback - if len(b) >= 3 && b[0] == 0xFF && b[1] == 0xD8 && b[2] == 0xFF { - return ".jpg", "image/jpeg" - } - if len(b) >= 8 && bytes.Equal(b[:8], []byte{0x89, 'P', 'N', 'G', 0x0D, 0x0A, 0x1A, 0x0A}) { - return ".png", "image/png" - } - if len(b) >= 12 && string(b[:4]) == "RIFF" && string(b[8:12]) == "WEBP" { - return ".webp", "image/webp" - } - if len(b) >= 6 && (string(b[:6]) == "GIF87a" || string(b[:6]) == "GIF89a") { - return ".gif", "image/gif" - } - - return ".jpg", "image/jpeg" -} - -func coverPathForCategory(key string, ext string) (string, error) { - root, err := coversRoot() - if err != nil { - return "", err - } - if strings.TrimSpace(root) == "" { - return "", fmt.Errorf("covers root ist leer") - } - if ext == "" { - ext = ".jpg" - } - return filepath.Join(root, key+ext), nil -} - -func findExistingCoverFile(key string) (string, os.FileInfo, bool) { - root, err := coversRoot() - if err != nil || strings.TrimSpace(root) == "" { - return "", nil, false - } - exts := []string{".jpg", ".png", ".webp", ".gif"} - for _, ext := range exts { - p := filepath.Join(root, key+ext) - if fi, err := os.Stat(p); err == nil && !fi.IsDir() && fi.Size() > 0 { - return p, fi, true - } - } - return "", nil, false -} - -func downloadBytes(ctx context.Context, rawURL string, ua string) ([]byte, string, 
error) { - rawURL = strings.TrimSpace(rawURL) - if rawURL == "" { - return nil, "", fmt.Errorf("src fehlt") - } - - // ✅ 1) Lokaler Pfad: nur /generated/... erlauben - if strings.HasPrefix(rawURL, "/") { - clean := path.Clean(rawURL) - if !strings.HasPrefix(clean, "/generated/") { - return nil, "", fmt.Errorf("src ungültig") - } - if strings.Contains(clean, "..") { - return nil, "", fmt.Errorf("src ungültig") - } - - rel := strings.TrimPrefix(clean, "/") - abs, err := resolvePathRelativeToApp(rel) - if err != nil || strings.TrimSpace(abs) == "" { - return nil, "", fmt.Errorf("src ungültig") - } - - f, err := os.Open(abs) - if err != nil { - return nil, "", fmt.Errorf("download failed: %v", err) - } - defer f.Close() - - b, err := io.ReadAll(io.LimitReader(f, 10*1024*1024)) - if err != nil { - return nil, "", fmt.Errorf("download failed: %v", err) - } - if len(b) == 0 { - return nil, "", fmt.Errorf("download empty") - } - - ext := strings.ToLower(filepath.Ext(abs)) - ct := "application/octet-stream" - switch ext { - case ".jpg", ".jpeg": - ct = "image/jpeg" - case ".png": - ct = "image/png" - case ".webp": - ct = "image/webp" - case ".gif": - ct = "image/gif" - } - - return b, ct, nil - } - - // ✅ 2) Remote URL: nur http/https - u, err := url.Parse(rawURL) - if err != nil || u.Scheme == "" || u.Host == "" { - return nil, "", fmt.Errorf("src ungültig") - } - if u.Scheme != "http" && u.Scheme != "https" { - return nil, "", fmt.Errorf("src schema nicht erlaubt") - } - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, rawURL, nil) - if err != nil { - return nil, "", err - } - if strings.TrimSpace(ua) == "" { - ua = "Mozilla/5.0" - } - req.Header.Set("User-Agent", ua) - req.Header.Set("Accept", "image/*,*/*;q=0.8") - - client := &http.Client{Timeout: 12 * time.Second} - resp, err := client.Do(req) - if err != nil { - return nil, "", err - } - defer resp.Body.Close() - - if resp.StatusCode != http.StatusOK { - return nil, "", fmt.Errorf("download failed: HTTP 
%d", resp.StatusCode) - } - - b, err := io.ReadAll(io.LimitReader(resp.Body, 10*1024*1024)) - if err != nil { - return nil, "", err - } - if len(b) == 0 { - return nil, "", fmt.Errorf("download empty") - } - - return b, resp.Header.Get("Content-Type"), nil -} - -// irgendwo auf Package-Level - -var coverBatchMu sync.Mutex - -var ( - coverBatchInflight int - coverBatchStarted time.Time - - coverBatchTotal int - coverBatchForced int - coverBatchMiss int - coverBatchErrors int - coverBatchNoThumb int - coverBatchDecodeErr int -) - -func coverBatchEnter(force bool) { - coverBatchMu.Lock() - defer coverBatchMu.Unlock() - - if coverBatchInflight == 0 { - coverBatchStarted = time.Now() - coverBatchTotal = 0 - coverBatchForced = 0 - coverBatchMiss = 0 - coverBatchErrors = 0 - coverBatchNoThumb = 0 - coverBatchDecodeErr = 0 - - log.Printf("[cover] BATCH START") - } - - coverBatchInflight++ - coverBatchTotal++ - if force { - coverBatchForced++ - } else { - coverBatchMiss++ - } -} - -func coverBatchLeave(outcome string, status int) { - coverBatchMu.Lock() - defer coverBatchMu.Unlock() - - if status >= 400 { - coverBatchErrors++ - } - switch outcome { - case "no-thumb": - coverBatchNoThumb++ - case "decode-failed-no-overlay": - coverBatchDecodeErr++ - } - - coverBatchInflight-- - if coverBatchInflight <= 0 { - dur := time.Since(coverBatchStarted).Round(time.Millisecond) - log.Printf( - "[cover] BATCH END total=%d miss=%d forced=%d errors=%d noThumb=%d decodeFail=%d took=%s", - coverBatchTotal, - coverBatchMiss, - coverBatchForced, - coverBatchErrors, - coverBatchNoThumb, - coverBatchDecodeErr, - dur, - ) - - coverBatchInflight = 0 - } -} - -var ( - reModelFromStem = regexp.MustCompile(`^(.*?)_\d{1,2}_\d{1,2}_\d{4}__\d{1,2}-\d{2}-\d{2}`) -) - -// stem ist z.B. 
"sigmasian_01_21_2026__07-28-13" oder ein Parent-Dir-Name -func inferModelFromStem(stem string) string { - stem = stripHotPrefix(strings.TrimSpace(stem)) - if stem == "" { - return "" - } - m := reModelFromStem.FindStringSubmatch(stem) - if len(m) >= 2 { - return strings.TrimSpace(m[1]) - } - return "" -} - -// akzeptiert: -// - "/generated/meta//preview.webp" -// - "C:\...\generated\meta\\preview.webp" -// - "http(s)://host/generated/meta//preview.webp" -// - (fallback) irgendeinen Dateinamen-Stem, der wie "_MM_DD_YYYY__HH-MM-ss" aussieht -func inferModelFromThumbLike(srcOrPath string) string { - s := strings.TrimSpace(srcOrPath) - if s == "" { - return "" - } - - // Windows -> slash - s = strings.ReplaceAll(s, `\`, `/`) - - // Wenn URL: nimm nur den Path - if u, err := url.Parse(s); err == nil && u != nil && u.Scheme != "" && u.Host != "" { - s = u.Path - } - - // Wenn es wie "...//preview.webp" aussieht: parent dir ist - base := path.Base(s) - lb := strings.ToLower(base) - if strings.HasPrefix(lb, "thumbs.") { - id := path.Base(path.Dir(s)) - return inferModelFromStem(id) - } - - // Fallback: versuch Stem aus basename - stem := strings.TrimSuffix(base, path.Ext(base)) - return inferModelFromStem(stem) -} - -type coverInfoListItem struct { - Category string `json:"category"` - Model string `json:"model,omitempty"` - GeneratedAt string `json:"generatedAt,omitempty"` - HasCover bool `json:"hasCover"` -} - -func generatedCoverInfoList(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet && r.Method != http.MethodHead { - http.Error(w, "Nur GET/HEAD erlaubt", http.StatusMethodNotAllowed) - return - } - - root, err := coversRoot() - if err != nil { - http.Error(w, "covers root: "+err.Error(), http.StatusInternalServerError) - return - } - - entries, err := os.ReadDir(root) - if err != nil { - http.Error(w, "covers dir: "+err.Error(), http.StatusInternalServerError) - return - } - - byKey := map[string]*coverInfoListItem{} - - ensure := func(key 
string) *coverInfoListItem { - if v, ok := byKey[key]; ok { - return v - } - v := &coverInfoListItem{Category: key} - byKey[key] = v - return v - } - - isCoverExt := func(ext string) bool { - switch strings.ToLower(ext) { - case ".jpg", ".jpeg", ".png", ".webp", ".gif": - return true - default: - return false - } - } - - for _, e := range entries { - name := e.Name() - lower := strings.ToLower(name) - - // info.json - if strings.HasSuffix(lower, ".info.json") { - key := strings.TrimSuffix(name, ".info.json") - if ci, ok := readCoverInfoBestEffort(key); ok { - v := ensure(key) - if strings.TrimSpace(ci.Category) != "" { - v.Category = strings.TrimSpace(ci.Category) - } - if strings.TrimSpace(ci.Model) != "" { - v.Model = strings.TrimSpace(ci.Model) - } - if strings.TrimSpace(ci.GeneratedAt) != "" { - v.GeneratedAt = strings.TrimSpace(ci.GeneratedAt) - } - } - continue - } - - // cover image - ext := filepath.Ext(name) - if isCoverExt(ext) { - key := strings.TrimSuffix(name, ext) - v := ensure(key) - v.HasCover = true - } - } - - // ✅ WICHTIG: Model nur ausgeben, wenn wirklich ein Cover-Bild existiert - for _, v := range byKey { - if !v.HasCover { - v.Model = "" - v.GeneratedAt = "" - } - if strings.TrimSpace(v.Category) == "" { - v.Category = "" - } - } - - keys := make([]string, 0, len(byKey)) - for k := range byKey { - keys = append(keys, k) - } - sort.Strings(keys) - - out := make([]coverInfoListItem, 0, len(keys)) - for _, k := range keys { - out = append(out, *byKey[k]) - } - - w.Header().Set("Content-Type", "application/json; charset=utf-8") - w.Header().Set("Cache-Control", "no-store") - - if r.Method == http.MethodHead { - w.WriteHeader(http.StatusOK) - return - } - - _ = json.NewEncoder(w).Encode(out) -} - -func generatedCover(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet && r.Method != http.MethodHead { - http.Error(w, "Nur GET/HEAD erlaubt", http.StatusMethodNotAllowed) - return - } - - category := r.URL.Query().Get("category") 
- key, err := sanitizeCoverKey(category) - if err != nil { - http.Error(w, "category ungültig: "+err.Error(), http.StatusBadRequest) - return - } - - refresh := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("refresh"))) - force := refresh == "1" || refresh == "true" || refresh == "yes" - - // Optional: model overlay - modelQ := strings.TrimSpace(r.URL.Query().Get("model")) - modelExplicit := modelQ != "" - model := modelQ - - // Optional: src - src := strings.TrimSpace(r.URL.Query().Get("src")) - - fallbackModel := "" - if ci, ok := readCoverInfoBestEffort(key); ok { - if m := strings.TrimSpace(ci.Model); m != "" { - fallbackModel = m - } - } - - if model == "" { - model = fallbackModel - } - - if !modelExplicit && src != "" { - if m := inferModelFromThumbLike(src); m != "" { - model = m - } - } - - reqID := strconv.FormatInt(time.Now().UnixNano(), 36) - - setDebugHeaders := func(cache string) { - w.Header().Set("X-Cover-Key", key) - w.Header().Set("X-Cover-Category", category) - if model != "" { - w.Header().Set("X-Cover-Model", model) - } - w.Header().Set("X-Cover-Cache", cache) // HIT | MISS | FORCED - w.Header().Set("X-Request-Id", reqID) - } - - // 1) Cache hit: direkt von Disk (nur wenn nicht force) - if !force { - // Wenn model im Request/abgeleitet da ist: info.json muss existieren & gleich sein, sonst neu erzeugen - if model != "" { - if ci, ok := readCoverInfoBestEffort(key); ok { - if strings.TrimSpace(ci.Model) != model { - force = true - } - } else { - force = true - } - } - - if !force { - if p, fi, ok := findExistingCoverFile(key); ok { - setDebugHeaders("HIT") - - if model != "" { - ci, ok := readCoverInfoBestEffort(key) - if !ok { - ci = coverInfo{Category: category} - } - - ci.Category = category - ci.Model = strings.TrimSpace(model) - ci.GeneratedAt = time.Now().UTC().Format(time.RFC3339Nano) - - writeCoverInfoBestEffort(key, ci) - } - - w.Header().Set("Cache-Control", "public, max-age=31536000") - w.Header().Set("X-Content-Type-Options", 
"nosniff") - - ext := strings.ToLower(filepath.Ext(p)) - switch ext { - case ".png": - w.Header().Set("Content-Type", "image/png") - case ".webp": - w.Header().Set("Content-Type", "image/webp") - case ".gif": - w.Header().Set("Content-Type", "image/gif") - default: - w.Header().Set("Content-Type", "image/jpeg") - } - - if r.Method == http.MethodHead { - w.WriteHeader(http.StatusOK) - return - } - - f, err := os.Open(p) - if err != nil { - http.NotFound(w, r) - return - } - defer f.Close() - - http.ServeContent(w, r, filepath.Base(p), fi.ModTime(), f) - return - } - } - } - - cacheStatus := "MISS" - if force { - cacheStatus = "FORCED" - } - setDebugHeaders(cacheStatus) - - coverBatchEnter(force) - - start := time.Now() - status := http.StatusOK - outcome := "ok" - - defer func() { - w.Header().Set("X-Cover-Gen-Ms", strconv.FormatInt(time.Since(start).Milliseconds(), 10)) - coverBatchLeave(outcome, status) - }() - - if _, err := ensureCoversDir(); err != nil { - status = http.StatusInternalServerError - outcome = "covers-dir" - http.Error(w, "covers-dir nicht verfügbar: "+err.Error(), status) - return - } - - ctx, cancel := context.WithTimeout(r.Context(), 20*time.Second) - defer cancel() - - var ( - raw []byte - mimeType string - ext string - ) - - thumbPath := "" - usedSrc := "" - - if src != "" { - var derr error - raw, mimeType, derr = downloadBytes(ctx, src, r.Header.Get("User-Agent")) - usedSrc = normalizeCoverSrc(src) - if derr != nil { - status = http.StatusBadRequest - outcome = "src-download" - http.Error(w, "src download failed: "+derr.Error(), status) - return - } - ext, mimeType = detectImageExt(mimeType, raw) - if len(raw) == 0 { - status = http.StatusBadRequest - outcome = "src-empty" - http.Error(w, "src leer", status) - return - } - - if model == "" { - if m := inferModelFromThumbLike(src); m != "" { - model = m - w.Header().Set("X-Cover-Model", model) - } - } - - } else { - var perr error - thumbPath, perr = pickRandomThumbForCategory(ctx, category) 
- if perr != nil { - if p, fi, ok := findExistingCoverFile(key); ok { - outcome = "fallback-existing-cover" - status = http.StatusOK - - w.Header().Set("Cache-Control", "public, max-age=600") - w.Header().Set("X-Content-Type-Options", "nosniff") - - ext2 := strings.ToLower(filepath.Ext(p)) - switch ext2 { - case ".png": - w.Header().Set("Content-Type", "image/png") - case ".webp": - w.Header().Set("Content-Type", "image/webp") - case ".gif": - w.Header().Set("Content-Type", "image/gif") - default: - w.Header().Set("Content-Type", "image/jpeg") - } - - if r.Method == http.MethodHead { - w.WriteHeader(http.StatusOK) - return - } - - f, err := os.Open(p) - if err != nil { - servePreviewStatusSVG(w, "No Cover", status) - return - } - defer f.Close() - - http.ServeContent(w, r, filepath.Base(p), fi.ModTime(), f) - return - } - - outcome = "no-thumb" - status = http.StatusNotFound - if r.Method == http.MethodHead { - w.WriteHeader(status) - return - } - servePreviewStatusSVG(w, "No Cover", status) - return - } - usedSrc = normalizeCoverSrc(thumbPath) - - raw, err = os.ReadFile(thumbPath) - if err != nil || len(raw) == 0 { - status = http.StatusInternalServerError - outcome = "thumb-read" - http.Error(w, "cover read fehlgeschlagen", status) - return - } - - ext = ".jpg" - mimeType = "image/jpeg" - - if model == "" { - if m := inferModelFromThumbLike(thumbPath); m != "" { - model = m - w.Header().Set("X-Cover-Model", model) - } - } - } - - if !modelExplicit { - if m := inferModelFromThumbLike(usedSrc); m != "" { - model = m - w.Header().Set("X-Cover-Model", model) - } - } - - // ✅ 3) Overlay + Re-Encode (bei dir aktuell ohne extra label-call; decode nur) - img, _, derr := image.Decode(bytes.NewReader(raw)) - if derr == nil && img != nil { - rgba := image.NewRGBA(img.Bounds()) - draw.Draw(rgba, rgba.Bounds(), img, img.Bounds().Min, draw.Src) - - var buf bytes.Buffer - switch strings.ToLower(ext) { - case ".png": - _ = png.Encode(&buf, rgba) - raw = buf.Bytes() - ext = 
".png" - mimeType = "image/png" - default: - _ = jpeg.Encode(&buf, rgba, &jpeg.Options{Quality: 85}) - raw = buf.Bytes() - ext = ".jpg" - mimeType = "image/jpeg" - } - } else { - outcome = "decode-failed-no-overlay" - } - - // 4) Vorherige Cover-Dateien entfernen - root, _ := coversRoot() - for _, e := range []string{".jpg", ".png", ".webp", ".gif"} { - _ = os.Remove(filepath.Join(root, key+e)) - } - _ = os.Remove(filepath.Join(root, key+".info.json")) - - // 5) Persistieren - dst, err := coverPathForCategory(key, ext) - if err != nil { - status = http.StatusInternalServerError - outcome = "cover-path" - http.Error(w, "cover path: "+err.Error(), status) - return - } - if err := atomicWriteFile(dst, raw); err != nil { - status = http.StatusInternalServerError - outcome = "cover-write" - http.Error(w, "cover write: "+err.Error(), status) - return - } - - // ✅ 6) info.json schreiben (best-effort) - writeCoverInfoBestEffort(key, coverInfo{ - Category: category, - Model: strings.TrimSpace(model), - Src: strings.TrimSpace(usedSrc), - GeneratedAt: time.Now().UTC().Format(time.RFC3339Nano), - }) - - // 7) Ausliefern - w.Header().Set("Cache-Control", "public, max-age=600") - w.Header().Set("Content-Type", mimeType) - w.Header().Set("X-Content-Type-Options", "nosniff") - w.Header().Set("X-Cover-Bytes", strconv.Itoa(len(raw))) - - if r.Method == http.MethodHead { - w.WriteHeader(http.StatusOK) - return - } - - w.WriteHeader(http.StatusOK) - _, _ = w.Write(raw) -} - -// (Optional) falls du es irgendwo nutzen willst -var errCoverNotSupported = errors.New("cover not supported") diff --git a/backend/preview_hls.go b/backend/preview_hls.go deleted file mode 100644 index 9dabdc2..0000000 --- a/backend/preview_hls.go +++ /dev/null @@ -1,393 +0,0 @@ -// backend\preview_hls.go - -package main - -import ( - "bytes" - "context" - "fmt" - "net/http" - "os" - "os/exec" - "path/filepath" - "regexp" - "strings" - "time" -) - -var previewFileRe = 
regexp.MustCompile(`^(index(_hq)?\.m3u8|seg_(low|hq)_\d+\.ts|seg_\d+\.ts)$`) - -func serveEmptyLiveM3U8(w http.ResponseWriter, r *http.Request) { - // Für Player: gültige Playlist statt 204 liefern - w.Header().Set("Content-Type", "application/vnd.apple.mpegurl; charset=utf-8") - w.Header().Set("Cache-Control", "no-store") - w.Header().Set("X-Content-Type-Options", "nosniff") - // Optional: Player/Proxy darf schnell retryen - w.Header().Set("Retry-After", "1") - - // Bei HEAD nur Header schicken - if r.Method == http.MethodHead { - w.WriteHeader(http.StatusOK) - return - } - - // Minimal gültige LIVE-Playlist (keine Segmente, kein ENDLIST) - // Viele Player bleiben damit im "loading", statt hart zu failen. - body := "#EXTM3U\n" + - "#EXT-X-VERSION:3\n" + - "#EXT-X-TARGETDURATION:2\n" + - "#EXT-X-MEDIA-SEQUENCE:0\n" - - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(body)) -} - -func stopPreview(job *RecordJob) { - jobsMu.Lock() - cmd := job.previewCmd - cancel := job.previewCancel - job.previewCmd = nil - job.previewCancel = nil - job.LiveThumbStarted = false - job.PreviewDir = "" - jobsMu.Unlock() - - if cancel != nil { - cancel() - } - if cmd != nil && cmd.Process != nil { - _ = cmd.Process.Kill() - } -} - -func servePreviewHLSFile(w http.ResponseWriter, r *http.Request, id, file string) { - file = strings.TrimSpace(file) - if file == "" || filepath.Base(file) != file || !previewFileRe.MatchString(file) { - http.Error(w, "ungültige file", http.StatusBadRequest) - return - } - - isIndex := file == "index.m3u8" || file == "index_hq.m3u8" - - jobsMu.Lock() - job, ok := jobs[id] - state := "" - if ok && job != nil { - state = strings.TrimSpace(job.PreviewState) - } - jobsMu.Unlock() - - // ========================= - // ✅ HEAD = nur Existenzcheck (kein hover nötig, kein Preview-Start) - // ========================= - if r.Method == http.MethodHead { - if !ok || job == nil { - w.WriteHeader(http.StatusNotFound) - return - } - if state == "private" { - 
w.WriteHeader(http.StatusForbidden) - return - } - if state == "offline" { - w.WriteHeader(http.StatusNotFound) - return - } - previewDir := strings.TrimSpace(job.PreviewDir) - if previewDir == "" { - w.WriteHeader(http.StatusNotFound) - return - } - p := filepath.Join(previewDir, file) - if st, err := os.Stat(p); err == nil && !st.IsDir() { - w.Header().Set("Cache-Control", "no-store") - w.WriteHeader(http.StatusOK) - return - } - w.WriteHeader(http.StatusNotFound) - return - } - - // ========================= - // ✅ NEU: Player darf Preview auch ohne Hover starten - // - Frontend hängt &play=1 an (empfohlen) - // - Wir akzeptieren zusätzlich: play=1 => treat as active - // ========================= - active := isHover(r) || strings.TrimSpace(r.URL.Query().Get("play")) == "1" - - if !active { - // Kein Hover/Play => niemals Live-HLS abgreifen - if isIndex { - serveEmptyLiveM3U8(w, r) - return - } - http.Error(w, "preview not active", http.StatusNotFound) - return - } - - // active => wenn Job unbekannt, sauber raus - if !ok || job == nil { - if isIndex { - serveEmptyLiveM3U8(w, r) - return - } - http.Error(w, "job nicht gefunden", http.StatusNotFound) - return - } - - // active => Preview starten/keepalive - ensurePreviewStarted(r, job) - touchPreview(job) - - // state ggf. 
nach Start nochmal lesen - jobsMu.Lock() - state = strings.TrimSpace(job.PreviewState) - jobsMu.Unlock() - - if state == "private" { - http.Error(w, "model private", http.StatusForbidden) - return - } - if state == "offline" { - http.Error(w, "model offline", http.StatusNotFound) - return - } - if state == "error" { - http.Error(w, "preview error", http.StatusServiceUnavailable) - return - } - - previewDir := strings.TrimSpace(job.PreviewDir) - if previewDir == "" { - if isIndex { - serveEmptyLiveM3U8(w, r) - return - } - http.Error(w, "preview nicht verfügbar", http.StatusNotFound) - return - } - - p := filepath.Join(previewDir, file) - - st, err := os.Stat(p) - if err != nil || st.IsDir() { - if isIndex { - serveEmptyLiveM3U8(w, r) - return - } - http.Error(w, "datei nicht gefunden", http.StatusNotFound) - return - } - - ext := strings.ToLower(filepath.Ext(p)) - - // ✅ common: always no-store - w.Header().Set("Cache-Control", "no-store") - // ✅ avoids some proxy buffering surprises (harmless if ignored) - w.Header().Set("X-Accel-Buffering", "no") - - // ========================= - // ✅ .m3u8: rewrite (klein, ReadFile ok) - // ========================= - if ext == ".m3u8" { - raw, err := os.ReadFile(p) - if err != nil { - http.Error(w, "m3u8 read failed", http.StatusInternalServerError) - return - } - - rewritten := rewriteM3U8(raw, id) - - w.Header().Set("Content-Type", "application/vnd.apple.mpegurl; charset=utf-8") - w.WriteHeader(http.StatusOK) - _, _ = w.Write(rewritten) - return - } - - // ========================= - // ✅ Segmente: robust streamen + Range-support - // ========================= - switch ext { - case ".ts": - w.Header().Set("Content-Type", "video/mp2t") - case ".m4s": - w.Header().Set("Content-Type", "video/iso.segment") - default: - w.Header().Set("Content-Type", "application/octet-stream") - } - - // ✅ Optional aber sehr hilfreich: - // liefere ein Segment erst aus, wenn es nicht mehr wächst (verhindert "hängende" große .ts) - if ext == 
".ts" || ext == ".m4s" { - if !waitForStableFile(p, 2, 120*time.Millisecond) { - // Segment ist vermutlich noch im Schreiben -> lieber 404, Player retryt - http.Error(w, "segment not ready", http.StatusNotFound) - return - } - } - - f, err := os.Open(p) - if err != nil { - http.Error(w, "open failed", http.StatusNotFound) - return - } - defer f.Close() - - // ✅ ServeContent macht Range korrekt und streamt ohne ReadAll. - // name ist nur für logs/cache; modTime für If-Modified-Since etc. - http.ServeContent(w, r, file, st.ModTime(), f) - -} - -func waitForStableFile(path string, checks int, interval time.Duration) bool { - // returns true if size is stable across N checks - var last int64 = -1 - for i := 0; i < checks; i++ { - st, err := os.Stat(path) - if err != nil || st.IsDir() { - return false - } - sz := st.Size() - if last >= 0 && sz == last { - return true - } - last = sz - time.Sleep(interval) - } - // if we never saw stability, assume not ready - return false -} - -func classifyPreviewFFmpegStderr(stderr string) (state string, httpStatus int) { - s := strings.ToLower(stderr) - - // ffmpeg schreibt typischerweise: - // "HTTP error 403 Forbidden" oder "Server returned 403 Forbidden" - if strings.Contains(s, "403 forbidden") || strings.Contains(s, "http error 403") || strings.Contains(s, "server returned 403") { - return "private", http.StatusForbidden - } - - // "HTTP error 404 Not Found" oder "Server returned 404 Not Found" - if strings.Contains(s, "404 not found") || strings.Contains(s, "http error 404") || strings.Contains(s, "server returned 404") { - return "offline", http.StatusNotFound - } - - return "", 0 -} - -func startPreviewHLS(ctx context.Context, job *RecordJob, m3u8URL, previewDir, httpCookie, userAgent string) error { - if strings.TrimSpace(ffmpegPath) == "" { - return fmt.Errorf("kein ffmpeg gefunden – setze FFMPEG_PATH oder lege ffmpeg(.exe) neben das Backend") - } - - if err := os.MkdirAll(previewDir, 0755); err != nil { - return err - } - 
- // ✅ PreviewState reset (neuer Start) - jobsMu.Lock() - job.PreviewState = "" - job.PreviewStateAt = "" - job.PreviewStateMsg = "" - jobsMu.Unlock() - notifyJobsChanged() - - commonIn := []string{"-y"} - if strings.TrimSpace(userAgent) != "" { - commonIn = append(commonIn, "-user_agent", userAgent) - } - if strings.TrimSpace(httpCookie) != "" { - commonIn = append(commonIn, "-headers", fmt.Sprintf("Cookie: %s\r\n", httpCookie)) - } - commonIn = append(commonIn, "-i", m3u8URL) - - hqArgs := append(commonIn, - "-vf", "scale=480:-2", - "-c:v", "libx264", "-preset", "veryfast", "-tune", "zerolatency", - "-pix_fmt", "yuv420p", - "-profile:v", "main", - "-level", "3.1", - "-threads", "4", - - // GOP ~ 2s (bei 24fps). Optional force_key_frames zusätzlich. - "-g", "48", "-keyint_min", "48", "-sc_threshold", "0", - // optional, wenn du noch große Segmente bekommst: - // "-force_key_frames", "expr:gte(t,n_forced*2)", - - "-map", "0:v:0", - "-map", "0:a:0?", - "-c:a", "aac", "-b:a", "128k", "-ac", "2", - - "-f", "hls", - "-hls_time", "2", - "-hls_list_size", "6", - "-hls_allow_cache", "0", - - // ✅ wichtig: temp_file - "-hls_flags", "delete_segments+append_list+independent_segments+temp_file", - - "-hls_segment_filename", filepath.Join(previewDir, "seg_hq_%05d.ts"), - - // ✅ Empfehlung: weglassen (du rewritest ohnehin) - // "-hls_base_url", baseURL, - - filepath.Join(previewDir, "index_hq.m3u8"), - ) - - cmd := exec.CommandContext(ctx, ffmpegPath, hqArgs...) 
- var stderr bytes.Buffer - cmd.Stderr = &stderr - - jobsMu.Lock() - job.previewCmd = cmd - jobsMu.Unlock() - - go func() { - if err := previewSem.Acquire(ctx); err != nil { - jobsMu.Lock() - if job.previewCmd == cmd { - job.previewCmd = nil - } - jobsMu.Unlock() - return - } - defer previewSem.Release() - - if err := cmd.Run(); err != nil && ctx.Err() == nil { - st := strings.TrimSpace(stderr.String()) - - // ✅ 403/404 erkennen -> Private/Offline setzen - state, code := classifyPreviewFFmpegStderr(st) - - jobsMu.Lock() - if state != "" { - job.PreviewState = state - job.PreviewStateAt = time.Now().UTC().Format(time.RFC3339Nano) - job.PreviewStateMsg = fmt.Sprintf("ffmpeg input returned HTTP %d", code) - } else { - job.PreviewState = "error" - job.PreviewStateAt = time.Now().UTC().Format(time.RFC3339Nano) - if len(st) > 280 { - job.PreviewStateMsg = st[:280] + "…" - } else { - job.PreviewStateMsg = st - } - } - jobsMu.Unlock() - notifyJobsChanged() - - fmt.Printf("⚠️ preview hq ffmpeg failed: %v (%s)\n", err, st) - } - - jobsMu.Lock() - if job.previewCmd == cmd { - job.previewCmd = nil - } - jobsMu.Unlock() - }() - - // ✅ Live thumb writer starten (schreibt generated//preview.webp regelmäßig neu) - startLiveThumbWebPLoop(ctx, job) - - return nil -} diff --git a/backend/preview_m3u8_rewrite.go b/backend/preview_m3u8_rewrite.go deleted file mode 100644 index 5ab45f3..0000000 --- a/backend/preview_m3u8_rewrite.go +++ /dev/null @@ -1,100 +0,0 @@ -package main - -import ( - "bufio" - "bytes" - "net/url" - "path" - "strings" -) - -func rewriteM3U8(raw []byte, id string) []byte { - // Wir bauen alle URIs so um, dass sie wieder über /api/preview laufen. - // Wichtig: play=1 bleibt dran, damit Folge-Requests (segments, chunklists) auch ohne Hover gehen. - base := "/api/preview?id=" + url.QueryEscape(id) + "&file=" - - var out bytes.Buffer - sc := bufio.NewScanner(bytes.NewReader(raw)) - - // Scanner default token limit 64K – m3u8 ist normalerweise klein, passt. 
- // Wenn du riesige Playlists hast, kannst du Buffer erhöhen. - - for sc.Scan() { - line := sc.Text() - trim := strings.TrimSpace(line) - - if trim == "" { - out.WriteByte('\n') - continue - } - - // Kommentare/Tags: ggf. URI="..." in Tags rewriten - if strings.HasPrefix(trim, "#") { - // EXT-X-KEY:URI="..." - line = rewriteAttrURI(line, base) - out.WriteString(line) - out.WriteByte('\n') - continue - } - - // Nicht-Tag => URI (segment oder child-playlist) - u := trim - - // Absolut? dann lassen - if strings.HasPrefix(u, "http://") || strings.HasPrefix(u, "https://") { - out.WriteString(line) - out.WriteByte('\n') - continue - } - - // Wenn es schon unser API ist, lassen - if strings.Contains(u, "/api/preview") { - out.WriteString(line) - out.WriteByte('\n') - continue - } - - // Nur basename nehmen (ffmpeg schreibt i.d.R. keine Subdirs) - name := path.Base(u) - - // Hier play=1 mitschicken: - out.WriteString(base + url.QueryEscape(name) + "&play=1") - out.WriteByte('\n') - } - - if err := sc.Err(); err != nil { - // Wenn Scanner aus irgendeinem Grund scheitert: lieber raw zurück (besser als kaputt) - return raw - } - return out.Bytes() -} - -func rewriteAttrURI(line, base string) string { - // Rewritet URI="xyz" in EXT-X-KEY / EXT-X-MAP / EXT-X-MEDIA / EXT-X-I-FRAME-STREAM-INF etc. - // Nur relative URIs werden angefasst. 
- const key = `URI="` - i := strings.Index(line, key) - if i < 0 { - return line - } - - j := strings.Index(line[i+len(key):], `"`) - if j < 0 { - return line - } - - start := i + len(key) - end := start + j - val := line[start:end] - valTrim := strings.TrimSpace(val) - - // absolut oder schon preview => nix tun - if strings.HasPrefix(valTrim, "http://") || strings.HasPrefix(valTrim, "https://") || strings.Contains(valTrim, "/api/preview") { - return line - } - - name := path.Base(valTrim) - repl := base + url.QueryEscape(name) + "&play=1" - - return line[:start] + repl + line[end:] -} diff --git a/backend/preview_status_svg.go b/backend/preview_status_svg.go deleted file mode 100644 index eaf96da..0000000 --- a/backend/preview_status_svg.go +++ /dev/null @@ -1,86 +0,0 @@ -package main - -import ( - "html" - "net/http" - "strings" -) - -func servePreviewStatusSVG(w http.ResponseWriter, label string, status int) { - w.Header().Set("Content-Type", "image/svg+xml; charset=utf-8") - w.Header().Set("Cache-Control", "no-store") - w.Header().Set("X-Content-Type-Options", "nosniff") - - if status <= 0 { - status = http.StatusOK - } - - title := html.EscapeString(strings.TrimSpace(label)) - if title == "" { - title = "Preview" - } - - // 16:9 (passt zu deinen Cards) - svg := ` - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ` + title + ` - - Preview nicht verfügbar - -` - - w.WriteHeader(status) - _, _ = w.Write([]byte(svg)) -} diff --git a/backend/preview_teaser.go b/backend/preview_teaser.go deleted file mode 100644 index 7a76991..0000000 --- a/backend/preview_teaser.go +++ /dev/null @@ -1,455 +0,0 @@ -// backend\preview_teaser.go - -package main - -import ( - "bufio" - "bytes" - "context" - "fmt" - "math" - "os" - "os/exec" - "strconv" - "strings" - "time" -) - -// Minimale Segmentdauer, damit ffmpeg nicht mit zu kurzen Schnipseln zickt. 
-const minSegmentDuration = 0.75 // Sekunden -const defaultTeaserSegments = 12 - -type TeaserPreviewOptions struct { - Segments int - SegmentDuration float64 - - Width int - Preset string - CRF int - - // wird von uns "hart" auf true gesetzt (Audio ist NICHT optional) - Audio bool - AudioBitrate string - - UseVsync2 bool -} - -// stepSizeAndOffset verteilt die Startpunkte über das Video. -// Rückgabe: stepSize, offset (beide in Sekunden). -func (o TeaserPreviewOptions) stepSizeAndOffset(dur float64) (float64, float64) { - if dur <= 0 { - return 0, 0 - } - - n := o.Segments - if n < 1 { - n = 1 - } - - segDur := o.SegmentDuration - if segDur <= 0 { - segDur = 1 - } - if segDur < minSegmentDuration { - segDur = minSegmentDuration - } - - // letzter sinnvoller Start (kleiner Sicherheitsabstand) - maxStart := dur - 0.05 - segDur - if maxStart < 0 { - maxStart = 0 - } - - // 1 Segment -> Mitte - if n == 1 { - return 0, maxStart * 0.5 - } - - // kleine Ränder, damit nicht immer ganz am Anfang/Ende - margin := 0.05 * maxStart - if margin < 0 { - margin = 0 - } - span := maxStart - 2*margin - if span < 0 { - span = maxStart - margin = 0 - } - - step := 0.0 - if n > 1 { - step = span / float64(n-1) - } - return step, margin -} - -func generateTeaserClipsMP4(ctx context.Context, srcPath, outPath string, clipLenSec float64, maxClips int) error { - return generateTeaserClipsMP4WithProgress(ctx, srcPath, outPath, clipLenSec, maxClips, nil) -} - -func generateTeaserClipsMP4WithProgress( - ctx context.Context, - srcPath, outPath string, - clipLenSec float64, - maxClips int, - onRatio func(r float64), -) error { - // kompatible Defaults aus deiner Signatur -> Options - opts := TeaserPreviewOptions{ - Segments: maxClips, - SegmentDuration: clipLenSec, - - // stash-like Defaults - Width: 640, - Preset: "veryfast", - CRF: 21, - Audio: true, - AudioBitrate: "128k", - UseVsync2: false, - } - return generateTeaserPreviewMP4WithProgress(ctx, srcPath, outPath, opts, onRatio) -} - -func 
generateTeaserChunkMP4(ctx context.Context, src, out string, start, dur float64, opts TeaserPreviewOptions) error { - - // ✅ Audio ist Pflicht (nicht optional) - opts.Audio = true - - tmp := strings.TrimSuffix(out, ".mp4") + ".part.mp4" - segDur := dur - if segDur < minSegmentDuration { - segDur = minSegmentDuration - } - - args := []string{ - "-y", "-hide_banner", "-loglevel", "error", - } - args = append(args, ffmpegInputTol...) - args = append(args, - "-ss", fmt.Sprintf("%.3f", start), - "-t", fmt.Sprintf("%.3f", segDur), - "-i", src, - "-map", "0:v:0", - "-c:v", "libx264", - "-pix_fmt", "yuv420p", - "-profile:v", "high", - "-level", "4.2", - "-preset", opts.Preset, - "-crf", strconv.Itoa(opts.CRF), - "-threads", "4", - ) - - if opts.UseVsync2 { - args = append(args, "-vsync", "2") - } - - if opts.Audio { - args = append(args, - "-map", "0:a:0", // Audio Pflicht - "-c:a", "aac", - "-b:a", opts.AudioBitrate, - "-ac", "2", - "-shortest", - ) - } else { - args = append(args, "-an") - } - - args = append(args, "-movflags", "+faststart", tmp) - - cmd := exec.CommandContext(ctx, ffmpegPath, args...) 
- var stderr bytes.Buffer - cmd.Stderr = &stderr - if err := cmd.Run(); err != nil { - _ = os.Remove(tmp) - return fmt.Errorf("ffmpeg teaser chunk failed: %v (%s)", err, strings.TrimSpace(stderr.String())) - } - _ = os.Remove(out) - return os.Rename(tmp, out) -} - -func computeTeaserStarts(dur float64, opts TeaserPreviewOptions) (starts []float64, segDur float64, usedSegments int) { - // opts normalisieren wie in generateTeaserPreviewMP4WithProgress - if opts.SegmentDuration <= 0 { - opts.SegmentDuration = 1 - } - if opts.Segments <= 0 { - opts.Segments = defaultTeaserSegments - } - segDur = opts.SegmentDuration - if segDur < minSegmentDuration { - segDur = minSegmentDuration - } - - // Kurzvideo-Fallback: wenn Video kürzer als Segments*SegmentDuration -> 1 Segment über ganze Dauer - if dur > 0 && dur < segDur*float64(opts.Segments) { - opts.Segments = 1 - segDur = dur - } - - usedSegments = opts.Segments - - // Dauer unbekannt: Start 0 - if !(dur > 0) { - return []float64{0}, segDur, 1 - } - - stepSize, offset := opts.stepSizeAndOffset(dur) - - starts = make([]float64, 0, opts.Segments) - for i := 0; i < opts.Segments; i++ { - t := offset + float64(i)*stepSize - maxStart := math.Max(0, dur-0.05-segDur) - - if t < 0 { - t = 0 - } - if t > maxStart { - t = maxStart - } - if t < 0.05 { - t = 0.05 - } - starts = append(starts, t) - } - return starts, segDur, usedSegments -} - -func generateTeaserPreviewMP4WithProgress( - ctx context.Context, - srcPath, outPath string, - opts TeaserPreviewOptions, - onRatio func(r float64), -) error { - - // ✅ Audio ist Pflicht (nicht optional) - opts.Audio = true - - // Defaults - if opts.SegmentDuration <= 0 { - opts.SegmentDuration = 1 - } - if opts.Segments <= 0 { - opts.Segments = defaultTeaserSegments - } - if opts.Width <= 0 { - opts.Width = 640 - } - if opts.Preset == "" { - opts.Preset = "veryfast" - } - if opts.CRF <= 0 { - opts.CRF = 21 - } - if opts.AudioBitrate == "" { - opts.AudioBitrate = "128k" - } - segDur := 
opts.SegmentDuration - if segDur < minSegmentDuration { - segDur = minSegmentDuration - } - - // Dauer holen (einmalig; wird gecached) - dur, _ := durationSecondsCached(ctx, srcPath) - - // Kurzvideo-Fallback wie "die andere": - // Wenn Video kürzer als Segments*SegmentDuration -> Single Preview über komplette Dauer - if dur > 0 && dur < segDur*float64(opts.Segments) { - // als 1 Segment behandeln, Duration = dur - opts.Segments = 1 - segDur = dur - } - - // Wenn Dauer unbekannt/zu klein: ab 0 ein Stück - if !(dur > 0) { - if onRatio != nil { - onRatio(0) - } - // hier könntest du auch segDur verwenden; ich nehme min(8, segDur) ähnlich wie vorher - err := generateTeaserChunkMP4(ctx, srcPath, outPath, 0, math.Min(8, segDur), opts) - if onRatio != nil { - onRatio(1) - } - return err - } - - starts, segDurComputed, _ := computeTeaserStarts(dur, opts) - // segDur ist später im Code benutzt -> segDur damit überschreiben: - segDur = segDurComputed - - expectedOutSec := float64(len(starts)) * segDur - tmp := strings.TrimSuffix(outPath, ".mp4") + ".part.mp4" - - args := []string{ - "-y", - "-nostats", - "-progress", "pipe:1", - "-hide_banner", - "-loglevel", "error", - } - - // Inputs: pro Segment eigener -ss/-t/-i (wie bei dir) - for _, t := range starts { - args = append(args, ffmpegInputTol...) 
- args = append(args, - "-ss", fmt.Sprintf("%.3f", t), - "-t", fmt.Sprintf("%.3f", segDur), - "-i", srcPath, - ) - } - - // filter_complex bauen - var fc strings.Builder - for i := range starts { - // stash-like: ScaleWidth(640), pix_fmt yuv420p, profile high/level 4.2 später in output args - fmt.Fprintf(&fc, - "[%d:v]scale=%d:-2,setsar=1,setpts=PTS-STARTPTS[v%d];", - i, opts.Width, i, - ) - - if opts.Audio { - // dein “concat-safe” Audio normalisieren (gute Idee) - fmt.Fprintf(&fc, - "[%d:a]aresample=48000,aformat=channel_layouts=stereo,asetpts=PTS-STARTPTS[a%d];", - i, i, - ) - } - } - - // interleaved concat inputs - for i := range starts { - if opts.Audio { - fmt.Fprintf(&fc, "[v%d][a%d]", i, i) - } else { - fmt.Fprintf(&fc, "[v%d]", i) - } - } - - if opts.Audio { - fmt.Fprintf(&fc, "concat=n=%d:v=1:a=1[v][a]", len(starts)) - } else { - fmt.Fprintf(&fc, "concat=n=%d:v=1:a=0[v]", len(starts)) - } - - args = append(args, "-filter_complex", fc.String()) - - // map outputs - args = append(args, "-map", "[v]") - if opts.Audio { - args = append(args, "-map", "[a]") - } - - // Video encode (stash-like) - args = append(args, - "-c:v", "libx264", - "-pix_fmt", "yuv420p", - "-profile:v", "high", - "-level", "4.2", - "-preset", opts.Preset, - "-crf", strconv.Itoa(opts.CRF), - "-threads", "4", - ) - - if opts.UseVsync2 { - args = append(args, "-vsync", "2") - } - - // Audio encode optional (stash-like 128k), plus dein -ac 2 - if opts.Audio { - args = append(args, - "-c:a", "aac", - "-b:a", opts.AudioBitrate, - "-ac", "2", - "-shortest", - ) - } - - args = append(args, "-movflags", "+faststart", tmp) - - cmd := exec.CommandContext(ctx, ffmpegPath, args...) 
- - stdout, err := cmd.StdoutPipe() - if err != nil { - return err - } - - var stderr bytes.Buffer - cmd.Stderr = &stderr - - if err := cmd.Start(); err != nil { - return err - } - - sc := bufio.NewScanner(stdout) - sc.Buffer(make([]byte, 0, 64*1024), 1024*1024) - - var lastSent float64 - var lastAt time.Time - - send := func(outSec float64, force bool) { - if onRatio == nil { - return - } - if expectedOutSec > 0 && outSec > 0 { - r := outSec / expectedOutSec - if r < 0 { - r = 0 - } - if r > 1 { - r = 1 - } - if r-lastSent < 0.01 && !force { - return - } - if !lastAt.IsZero() && time.Since(lastAt) < 150*time.Millisecond && !force { - return - } - lastSent = r - lastAt = time.Now() - onRatio(r) - return - } - if force { - onRatio(1) - } - } - - var outSec float64 - - for sc.Scan() { - line := strings.TrimSpace(sc.Text()) - if line == "" { - continue - } - parts := strings.SplitN(line, "=", 2) - if len(parts) != 2 { - continue - } - k, v := parts[0], parts[1] - - switch k { - case "out_time_ms": - if n, perr := strconv.ParseInt(strings.TrimSpace(v), 10, 64); perr == nil && n > 0 { - outSec = float64(n) / 1_000_000.0 - send(outSec, false) - } - case "out_time": - if s := parseFFmpegOutTime(v); s > 0 { - outSec = s - send(outSec, false) - } - case "progress": - if strings.TrimSpace(v) == "end" { - send(outSec, true) - } - } - } - - if err := cmd.Wait(); err != nil { - _ = os.Remove(tmp) - return fmt.Errorf("ffmpeg teaser preview failed: %v (%s)", err, strings.TrimSpace(stderr.String())) - } - - _ = os.Remove(outPath) - return os.Rename(tmp, outPath) -} diff --git a/backend/preview_webp.go b/backend/preview_webp.go deleted file mode 100644 index 90c155e..0000000 --- a/backend/preview_webp.go +++ /dev/null @@ -1,728 +0,0 @@ -// backend\preview_webp.go - -package main - -import ( - "bytes" - "context" - "fmt" - "net/http" - "os" - "os/exec" - "path/filepath" - "strconv" - "strings" - "time" -) - -// ------------------------------------------------------------ -// Frame 
extraction helpers (WebP only) -// ------------------------------------------------------------ - -// extractLastFrameWebP extrahiert ein WebP aus dem letzten Frame der Datei. -func extractLastFrameWebP(path string) ([]byte, error) { - cmd := exec.Command( - ffmpegPath, - "-hide_banner", - "-loglevel", "error", - "-sseof", "-0.1", - "-i", path, - "-frames:v", "1", - "-vf", "scale=720:-2", - "-quality", "75", - "-f", "image2pipe", - "-vcodec", "libwebp", - "pipe:1", - ) - - var out bytes.Buffer - var stderr bytes.Buffer - cmd.Stdout = &out - cmd.Stderr = &stderr - - if err := cmd.Run(); err != nil { - return nil, fmt.Errorf("ffmpeg last-frame webp: %w (%s)", err, strings.TrimSpace(stderr.String())) - } - - b := out.Bytes() - if len(b) == 0 { - return nil, fmt.Errorf("ffmpeg last-frame webp: empty output") - } - return b, nil -} - -// extractFrameAtTimeWebP extrahiert ein WebP an einer Zeitposition (Sekunden). -func extractFrameAtTimeWebP(path string, seconds float64) ([]byte, error) { - if seconds < 0 { - seconds = 0 - } - seek := fmt.Sprintf("%.3f", seconds) - - cmd := exec.Command( - ffmpegPath, - "-hide_banner", - "-loglevel", "error", - "-ss", seek, - "-i", path, - "-frames:v", "1", - "-vf", "scale=720:-2", - "-quality", "75", - "-f", "image2pipe", - "-vcodec", "libwebp", - "pipe:1", - ) - - var out bytes.Buffer - var stderr bytes.Buffer - cmd.Stdout = &out - cmd.Stderr = &stderr - - if err := cmd.Run(); err != nil { - return nil, fmt.Errorf("ffmpeg frame-at-time webp: %w (%s)", err, strings.TrimSpace(stderr.String())) - } - - b := out.Bytes() - if len(b) == 0 { - return nil, fmt.Errorf("ffmpeg frame-at-time webp: empty output") - } - return b, nil -} - -// extractLastFrameWebPScaled extrahiert ein WebP aus dem letzten Frame und skaliert auf width (Höhe automatisch). 
-// quality: 0..100 (ffmpeg -quality) -func extractLastFrameWebPScaled(path string, width int, quality int) ([]byte, error) { - if width <= 0 { - width = 320 - } - if quality <= 0 || quality > 100 { - quality = 70 - } - - cmd := exec.Command( - ffmpegPath, - "-hide_banner", "-loglevel", "error", - "-sseof", "-0.25", - "-i", path, - "-frames:v", "1", - "-vf", fmt.Sprintf("scale=%d:-2", width), - "-quality", strconv.Itoa(quality), - "-f", "image2pipe", - "-vcodec", "libwebp", - "pipe:1", - ) - - var out bytes.Buffer - var stderr bytes.Buffer - cmd.Stdout = &out - cmd.Stderr = &stderr - - if err := cmd.Run(); err != nil { - return nil, fmt.Errorf("ffmpeg last-frame scaled webp: %w (%s)", err, strings.TrimSpace(stderr.String())) - } - - b := out.Bytes() - if len(b) == 0 { - return nil, fmt.Errorf("ffmpeg last-frame scaled webp: empty output") - } - return b, nil -} - -// extractFirstFrameWebPScaled extrahiert ein WebP aus dem ersten Frame und skaliert auf width. -func extractFirstFrameWebPScaled(path string, width int, quality int) ([]byte, error) { - if width <= 0 { - width = 320 - } - if quality <= 0 || quality > 100 { - quality = 70 - } - - cmd := exec.Command( - ffmpegPath, - "-hide_banner", "-loglevel", "error", - "-ss", "0", - "-i", path, - "-frames:v", "1", - "-vf", fmt.Sprintf("scale=%d:-2", width), - "-quality", strconv.Itoa(quality), - "-f", "image2pipe", - "-vcodec", "libwebp", - "pipe:1", - ) - - var out bytes.Buffer - var stderr bytes.Buffer - cmd.Stdout = &out - cmd.Stderr = &stderr - - if err := cmd.Run(); err != nil { - return nil, fmt.Errorf("ffmpeg first-frame scaled webp: %w (%s)", err, strings.TrimSpace(stderr.String())) - } - - b := out.Bytes() - if len(b) == 0 { - return nil, fmt.Errorf("ffmpeg first-frame scaled webp: empty output") - } - return b, nil -} - -// sucht das "neueste" Preview-Segment (seg_low_XXXXX.ts / seg_hq_XXXXX.ts) -func latestPreviewSegment(previewDir string) (string, error) { - entries, err := os.ReadDir(previewDir) - if err 
!= nil { - return "", err - } - - var best string - for _, e := range entries { - if e.IsDir() { - continue - } - name := e.Name() - if !strings.HasPrefix(name, "seg_low_") && !strings.HasPrefix(name, "seg_hq_") { - continue - } - if best == "" || name > best { - best = name - } - } - - if best == "" { - return "", fmt.Errorf("kein Preview-Segment in %s", previewDir) - } - return filepath.Join(previewDir, best), nil -} - -// extractLastFrameFromPreviewDirThumbWebP erzeugt ein kleines WebP aus dem letzten Preview-Segment. -func extractLastFrameFromPreviewDirThumbWebP(previewDir string) ([]byte, error) { - seg, err := latestPreviewSegment(previewDir) - if err != nil { - return nil, err - } - - // low-res, notfalls fallback auf erstes Frame - img, err := extractLastFrameWebPScaled(seg, 320, 70) - if err == nil && len(img) > 0 { - return img, nil - } - return extractFirstFrameWebPScaled(seg, 320, 70) -} - -// extractLastFrameFromPreviewDirWebP erzeugt ein WebP aus dem letzten Preview-Segment. -func extractLastFrameFromPreviewDirWebP(previewDir string) ([]byte, error) { - seg, err := latestPreviewSegment(previewDir) - if err != nil { - return nil, err - } - - img, err := extractLastFrameWebP(seg) - if err != nil { - // extractFirstFrameWebP muss bei dir existieren oder du implementierst es analog wie oben; - // wenn du es nicht hast, nimm scaled-first als fallback. 
- return extractFirstFrameWebPScaled(seg, 720, 75) - } - return img, nil -} - -// ------------------------------------------------------------ -// Preview serving (webp only) -// ------------------------------------------------------------ - -func serveLivePreviewWebPFile(w http.ResponseWriter, r *http.Request, path string) { - f, err := os.Open(path) - if err != nil { - http.NotFound(w, r) - return - } - defer f.Close() - - st, err := f.Stat() - if err != nil || st.IsDir() || st.Size() == 0 { - http.NotFound(w, r) - return - } - - w.Header().Set("Content-Type", "image/webp") - w.Header().Set("Cache-Control", "no-store") - http.ServeContent(w, r, "preview.webp", st.ModTime(), f) -} - -func servePreviewWebPFile(w http.ResponseWriter, r *http.Request, path string) { - f, err := os.Open(path) - if err != nil { - http.NotFound(w, r) - return - } - defer f.Close() - - st, err := f.Stat() - if err != nil || st.IsDir() || st.Size() == 0 { - http.NotFound(w, r) - return - } - - w.Header().Set("Content-Type", "image/webp") - // finished previews dürfen cachen - w.Header().Set("Cache-Control", "public, max-age=600") - http.ServeContent(w, r, filepath.Base(path), st.ModTime(), f) -} - -func servePreviewWebPBytes(w http.ResponseWriter, b []byte) { - if len(b) == 0 { - w.WriteHeader(http.StatusNoContent) - return - } - w.Header().Set("Content-Type", "image/webp") - w.Header().Set("Cache-Control", "public, max-age=60") - w.WriteHeader(http.StatusOK) - _, _ = w.Write(b) -} - -func serveLivePreviewWebPBytes(w http.ResponseWriter, b []byte) { - if len(b) == 0 { - w.Header().Set("Cache-Control", "no-store") - w.WriteHeader(http.StatusNoContent) - return - } - w.Header().Set("Content-Type", "image/webp") - w.Header().Set("Cache-Control", "no-store") - w.WriteHeader(http.StatusOK) - _, _ = w.Write(b) -} - -// ------------------------------------------------------------ -// Preview alias: preview.webp / preview.webp (webp only) -// 
------------------------------------------------------------ - -func servePreviewWebPAlias(w http.ResponseWriter, r *http.Request, id string) { - // 1) Wenn Job bekannt (id = job.ID): assetID aus Output ableiten - jobsMu.Lock() - job := jobs[id] - jobsMu.Unlock() - - if job != nil { - assetID := assetIDForJob(job) - if assetID != "" { - if webpPath, err := generatedThumbWebPFile(assetID); err == nil { - if st, err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 { - if job.Status == JobRunning { - serveLivePreviewWebPFile(w, r, webpPath) - } else { - servePreviewWebPFile(w, r, webpPath) - } - return - } - } - } - - // Optional: running in-memory fallback (nur WebP) - if job.Status == JobRunning { - job.previewMu.Lock() - cached := job.previewWebp - job.previewMu.Unlock() - if len(cached) > 0 { - serveLivePreviewWebPBytes(w, cached) - return - } - } - - servePreviewStatusSVG(w, "Preview", http.StatusOK) - return - } - - // 2) Kein Job im RAM: id als assetID behandeln (finished files nach Neustart) - assetID := stripHotPrefix(strings.TrimSpace(id)) - if assetID == "" { - http.NotFound(w, r) - return - } - - if webpPath, err := generatedThumbWebPFile(assetID); err == nil { - if st, err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 { - servePreviewWebPFile(w, r, webpPath) - return - } - } - - http.NotFound(w, r) -} - -func isHover(r *http.Request) bool { - v := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("hover"))) - return v == "1" || v == "true" || v == "yes" -} - -func touchPreview(job *RecordJob) { - if job == nil { - return - } - jobsMu.Lock() - job.previewLastHit = time.Now() - jobsMu.Unlock() -} - -func ensurePreviewStarted(r *http.Request, job *RecordJob) { - if job == nil { - return - } - job.previewStartMu.Lock() - defer job.previewStartMu.Unlock() - - jobsMu.Lock() - // läuft schon? 
- if job.previewCmd != nil && job.PreviewDir != "" { - job.previewLastHit = time.Now() - jobsMu.Unlock() - return - } - - // brauchen M3U8 URL - m3u8 := strings.TrimSpace(job.PreviewM3U8) - cookie := strings.TrimSpace(job.PreviewCookie) - ua := strings.TrimSpace(job.PreviewUA) - jobsMu.Unlock() - - if m3u8 == "" { - return - } - - // eigener Context für Preview (WICHTIG: nicht der Recording ctx) - pctx, cancel := context.WithCancel(context.Background()) - - // PreviewDir temp - assetID := assetIDForJob(job) - pdir := filepath.Join(os.TempDir(), "rec_preview", assetID) - - jobsMu.Lock() - job.PreviewDir = pdir - job.previewCancel = cancel - job.previewLastHit = time.Now() - jobsMu.Unlock() - - _ = startPreviewHLS(pctx, job, m3u8, pdir, cookie, ua) -} - -func recordPreview(w http.ResponseWriter, r *http.Request) { - // nur GET/HEAD erlauben - if r.Method != http.MethodGet && r.Method != http.MethodHead { - http.Error(w, "method not allowed", http.StatusMethodNotAllowed) - return - } - - id := strings.TrimSpace(r.URL.Query().Get("id")) - if id == "" { - // Alias: Frontend schickt "name" - id = strings.TrimSpace(r.URL.Query().Get("name")) - } - if id == "" { - http.Error(w, "id fehlt", http.StatusBadRequest) - return - } - - // Image / HLS file requests abfangen - if file := strings.TrimSpace(r.URL.Query().Get("file")); file != "" { - low := strings.ToLower(file) - // ✅ NUR WEBP - if low == "preview.webp" || low == "preview.webp" { - servePreviewWebPAlias(w, r, id) - return - } - // HLS wie gehabt - servePreviewHLSFile(w, r, id, file) - return - } - - // Schauen, ob wir einen Job mit dieser ID kennen (laufend oder gerade fertig) - jobsMu.Lock() - job, ok := jobs[id] - jobsMu.Unlock() - - if ok { - // ✅ 0) Running: wenn generated//preview.webp existiert -> sofort ausliefern - // (kein ffmpeg pro HTTP-Request) - if job.Status == JobRunning { - assetID := assetIDForJob(job) - if assetID != "" { - if webpPath, err := generatedThumbWebPFile(assetID); err == nil { - if st, 
err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 { - serveLivePreviewWebPFile(w, r, webpPath) - return - } - } - } - } - - // ✅ Fallback: In-Memory-Cache (falls preview.webp noch nicht da ist) - job.previewMu.Lock() - cached := job.previewWebp - cachedAt := job.previewWebpAt - freshWindow := 8 * time.Second - fresh := len(cached) > 0 && !cachedAt.IsZero() && time.Since(cachedAt) < freshWindow - - // Wenn nicht frisch, ggf. im Hintergrund aktualisieren (einmal gleichzeitig) - if !fresh && !job.previewGen { - job.previewGen = true - go func(j *RecordJob, jobID string) { - defer func() { - j.previewMu.Lock() - j.previewGen = false - j.previewMu.Unlock() - }() - - var img []byte - var genErr error - - // 1) aus Preview-Segmenten - previewDir := strings.TrimSpace(j.PreviewDir) - if previewDir != "" { - img, genErr = extractLastFrameFromPreviewDirWebP(previewDir) - } - - // 2) Fallback: aus der Ausgabedatei - if genErr != nil || len(img) == 0 { - outPath := strings.TrimSpace(j.Output) - if outPath != "" { - outPath = filepath.Clean(outPath) - if !filepath.IsAbs(outPath) { - if abs, err := resolvePathRelativeToApp(outPath); err == nil { - outPath = abs - } - } - if fi, err := os.Stat(outPath); err == nil && !fi.IsDir() && fi.Size() > 0 { - img, genErr = extractLastFrameWebP(outPath) - if genErr != nil { - // fallback: erster Frame skaliert - img, _ = extractFirstFrameWebPScaled(outPath, 720, 75) - } - } - } - } - - if len(img) > 0 { - j.previewMu.Lock() - j.previewWebp = img - j.previewWebpAt = time.Now() - j.previewMu.Unlock() - } - }(job, id) - } - - // Wir liefern entweder ein frisches Bild, oder das zuletzt gecachte. 
- out := cached - job.previewMu.Unlock() - if len(out) > 0 { - serveLivePreviewWebPBytes(w, out) // no-store für laufende Jobs - return - } - - // Wenn Preview definitiv nicht geht -> Placeholder statt 204 - jobsMu.Lock() - state := strings.TrimSpace(job.PreviewState) - jobsMu.Unlock() - - if state == "private" { - servePreviewStatusSVG(w, "Private", http.StatusOK) - return - } - if state == "offline" { - servePreviewStatusSVG(w, "Offline", http.StatusOK) - return - } - - // noch kein Bild verfügbar -> 204 (Frontend zeigt Placeholder und retry) - w.Header().Set("Cache-Control", "no-store") - w.WriteHeader(http.StatusNoContent) - return - } - - // Kein Job im RAM → id als Dateistamm für fertige Downloads behandeln - servePreviewForFinishedFile(w, r, id) -} - -// ------------------------------------------------------------ -// Live thumbs generator (WebP) -// ------------------------------------------------------------ - -func updateLiveThumbWebPOnce(ctx context.Context, job *RecordJob) { - // Snapshot unter Lock holen - jobsMu.Lock() - status := job.Status - previewDir := job.PreviewDir - out := job.Output - jobsMu.Unlock() - - if status != JobRunning { - return - } - - // Zielpfad: generated//preview.webp - assetID := assetIDForJob(job) - thumbPath, err := generatedThumbWebPFile(assetID) - if err != nil { - return - } - - // Wenn frisch genug: skip - if st, err := os.Stat(thumbPath); err == nil && st.Size() > 0 { - if time.Since(st.ModTime()) < 10*time.Second { - return - } - } - - // Concurrency limit über thumbSem - if thumbSem != nil { - thumbCtx, cancel := context.WithTimeout(ctx, 3*time.Second) - defer cancel() - - if err := thumbSem.Acquire(thumbCtx); err != nil { - return - } - defer thumbSem.Release() - } - - var img []byte - - // 1) bevorzugt aus Preview-Segmenten - if previewDir != "" { - if b, err := extractLastFrameFromPreviewDirThumbWebP(previewDir); err == nil && len(b) > 0 { - img = b - } - } - - // 2) fallback aus Output-Datei - if len(img) == 0 && 
out != "" { - if b, err := extractLastFrameWebPScaled(out, 320, 70); err == nil && len(b) > 0 { - img = b - } - } - - if len(img) == 0 { - return - } - - _ = atomicWriteFile(thumbPath, img) -} - -func startLiveThumbWebPLoop(ctx context.Context, job *RecordJob) { - // einmalig starten - jobsMu.Lock() - if job.LiveThumbStarted { - jobsMu.Unlock() - return - } - job.LiveThumbStarted = true - jobsMu.Unlock() - - go func() { - // sofort einmal versuchen - updateLiveThumbWebPOnce(ctx, job) - - for { - delay := 10 * time.Second - - select { - case <-ctx.Done(): - return - case <-time.After(delay): - // Stoppen, sobald Job nicht mehr läuft - jobsMu.Lock() - st := job.Status - jobsMu.Unlock() - if st != JobRunning { - return - } - updateLiveThumbWebPOnce(ctx, job) - } - } - }() -} - -// ------------------------------------------------------------ -// Finished file preview (WebP only, no legacy jpg migration) -// ------------------------------------------------------------ - -func servePreviewForFinishedFile(w http.ResponseWriter, r *http.Request, id string) { - var err error - id, err = sanitizeID(id) - if err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - return - } - - outPath, err := findFinishedFileByID(id) - if err != nil { - http.Error(w, "preview nicht verfügbar", http.StatusNotFound) - return - } - - if err := ensureGeneratedDirs(); err != nil { - http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError) - return - } - - // Assets immer auf "basename ohne HOT" ablegen - assetID := stripHotPrefix(id) - if assetID == "" { - assetID = id - } - - assetDir, err := ensureGeneratedDir(assetID) - if err != nil { - http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError) - return - } - - // Frame-Caching für t=... 
(WebP) - if tStr := strings.TrimSpace(r.URL.Query().Get("t")); tStr != "" { - if sec, err := strconv.ParseFloat(tStr, 64); err == nil && sec >= 0 { - secI := int64(sec + 0.5) - if secI < 0 { - secI = 0 - } - framePath := filepath.Join(assetDir, fmt.Sprintf("t_%d.webp", secI)) - if fi, err := os.Stat(framePath); err == nil && !fi.IsDir() && fi.Size() > 0 { - servePreviewWebPFile(w, r, framePath) - return - } - - img, err := extractFrameAtTimeWebP(outPath, float64(secI)) - if err == nil && len(img) > 0 { - _ = atomicWriteFile(framePath, img) - servePreviewWebPBytes(w, img) - return - } - } - } - - thumbPath := filepath.Join(assetDir, "preview.webp") - - // 1) Cache hit - if fi, err := os.Stat(thumbPath); err == nil && !fi.IsDir() && fi.Size() > 0 { - servePreviewWebPFile(w, r, thumbPath) - return - } - - // 2) Neu erzeugen - genCtx, cancel := context.WithTimeout(r.Context(), 30*time.Second) - defer cancel() - - var t float64 = 0 - if dur, derr := durationSecondsCached(genCtx, outPath); derr == nil && dur > 0 { - t = dur * 0.5 - } - - img, err := extractFrameAtTimeWebP(outPath, t) - if err != nil || len(img) == 0 { - img, err = extractLastFrameWebP(outPath) - if err != nil || len(img) == 0 { - // fallback: erster Frame skaliert - img, err = extractFirstFrameWebPScaled(outPath, 720, 75) - if err != nil || len(img) == 0 { - http.Error(w, "konnte preview nicht erzeugen", http.StatusInternalServerError) - return - } - } - } - - _ = atomicWriteFile(thumbPath, img) - servePreviewWebPBytes(w, img) -} diff --git a/backend/record_handlers.go b/backend/record.go similarity index 67% rename from backend/record_handlers.go rename to backend/record.go index b3675ca..35f4eb4 100644 --- a/backend/record_handlers.go +++ b/backend/record.go @@ -1,5 +1,4 @@ -// backend\record_handlers.go - +// backend/record.go package main import ( @@ -10,7 +9,6 @@ import ( "net/http" "net/url" "os" - "path" "path/filepath" "reflect" "runtime" @@ -22,6 +20,8 @@ import ( "time" ) +// ---------------- 
Types ---------------- + type RecordRequest struct { URL string `json:"url"` Cookie string `json:"cookie,omitempty"` @@ -72,7 +72,7 @@ type durationItem struct { } type undoDeleteToken struct { - Trash string `json:"trash"` // basename in .trash + Trash string `json:"trash"` // basename in .trash (legacy/optional) RelDir string `json:"relDir"` // dir relativ zu doneAbs, z.B. ".", "keep/model", "model" File string `json:"file"` // original basename, z.B. "HOT xyz.mp4" } @@ -97,127 +97,25 @@ func decodeUndoDeleteToken(raw string) (undoDeleteToken, error) { return t, nil } -func isSafeRelDir(rel string) bool { - rel = strings.TrimSpace(rel) - if rel == "" { - return false - } - // normalize to slash for validation - rel = filepath.ToSlash(rel) - if strings.HasPrefix(rel, "/") { - return false - } - clean := path.Clean(rel) // path.Clean => forward slashes - if clean == "." { - return true - } - if strings.HasPrefix(clean, "../") || clean == ".." { - return false - } - // prevent weird traversal - if strings.Contains(clean, `\`) { - return false - } - return true +// ---------------- Small response helpers ---------------- + +func respondJSON(w http.ResponseWriter, v any) { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("Cache-Control", "no-store") + _ = json.NewEncoder(w).Encode(v) } -func isSafeBasename(name string) bool { - name = strings.TrimSpace(name) - if name == "" { - return false +func mustMethod(w http.ResponseWriter, r *http.Request, methods ...string) bool { + for _, m := range methods { + if r.Method == m { + return true + } } - if strings.Contains(name, "/") || strings.Contains(name, "\\") { - return false - } - return filepath.Base(name) == name + http.Error(w, "Method Not Allowed", http.StatusMethodNotAllowed) + return false } -func intFromAny(v any) (int, bool) { - switch x := v.(type) { - case int: - return x, true - case int8: - return int(x), true - case int16: - return int(x), true - case int32: - return int(x), true - case 
int64: - return int(x), true - case uint: - return int(x), true - case uint8: - return int(x), true - case uint16: - return int(x), true - case uint32: - return int(x), true - case uint64: - return int(x), true - case float32: - return int(x), true - case float64: - return int(x), true - case json.Number: - if i, err := x.Int64(); err == nil { - return int(i), true - } - if f, err := x.Float64(); err == nil { - return int(f), true - } - case string: - s := strings.TrimSpace(x) - if s == "" { - return 0, false - } - if i, err := strconv.Atoi(s); err == nil { - return i, true - } - } - return 0, false -} - -func floatFromAny(v any) (float64, bool) { - switch x := v.(type) { - case float32: - return float64(x), true - case float64: - return x, true - case int: - return float64(x), true - case int8: - return float64(x), true - case int16: - return float64(x), true - case int32: - return float64(x), true - case int64: - return float64(x), true - case uint: - return float64(x), true - case uint8: - return float64(x), true - case uint16: - return float64(x), true - case uint32: - return float64(x), true - case uint64: - return float64(x), true - case json.Number: - if f, err := x.Float64(); err == nil { - return f, true - } - case string: - s := strings.TrimSpace(x) - if s == "" { - return 0, false - } - if f, err := strconv.ParseFloat(s, 64); err == nil { - return f, true - } - } - return 0, false -} +// ---------------- Preview sprite truth (shared) ---------------- type previewSpriteMetaFileInfo struct { Count int @@ -226,6 +124,7 @@ type previewSpriteMetaFileInfo struct { StepSeconds float64 } +// Best-effort parsing "previewSprite" from meta.json func readPreviewSpriteMetaFromMetaFile(metaPath string) (previewSpriteMetaFileInfo, bool) { var out previewSpriteMetaFileInfo @@ -246,6 +145,93 @@ func readPreviewSpriteMetaFromMetaFile(metaPath string) (previewSpriteMetaFileIn return out, false } + intFromAny := func(v any) (int, bool) { + switch x := v.(type) { + case int: 
+ return x, true + case int8: + return int(x), true + case int16: + return int(x), true + case int32: + return int(x), true + case int64: + return int(x), true + case uint: + return int(x), true + case uint8: + return int(x), true + case uint16: + return int(x), true + case uint32: + return int(x), true + case uint64: + return int(x), true + case float32: + return int(x), true + case float64: + return int(x), true + case json.Number: + if i, err := x.Int64(); err == nil { + return int(i), true + } + if f, err := x.Float64(); err == nil { + return int(f), true + } + case string: + s := strings.TrimSpace(x) + if s == "" { + return 0, false + } + if i, err := strconv.Atoi(s); err == nil { + return i, true + } + } + return 0, false + } + + floatFromAny := func(v any) (float64, bool) { + switch x := v.(type) { + case float32: + return float64(x), true + case float64: + return x, true + case int: + return float64(x), true + case int8: + return float64(x), true + case int16: + return float64(x), true + case int32: + return float64(x), true + case int64: + return float64(x), true + case uint: + return float64(x), true + case uint8: + return float64(x), true + case uint16: + return float64(x), true + case uint32: + return float64(x), true + case uint64: + return float64(x), true + case json.Number: + if f, err := x.Float64(); err == nil { + return f, true + } + case string: + s := strings.TrimSpace(x) + if s == "" { + return 0, false + } + if f, err := strconv.ParseFloat(s, 64); err == nil { + return f, true + } + } + return 0, false + } + if n, ok := intFromAny(ps["count"]); ok && n > 0 { out.Count = n } else if n, ok := intFromAny(ps["frames"]); ok && n > 0 { @@ -269,7 +255,6 @@ func readPreviewSpriteMetaFromMetaFile(metaPath string) (previewSpriteMetaFileIn out.StepSeconds = f } - // gültig, wenn mindestens count oder grid vorhanden ist if out.Count > 0 || (out.Cols > 0 && out.Rows > 0) { return out, true } @@ -297,11 +282,9 @@ func previewSpriteTruthForID(id string) 
previewSpriteMetaResp { return out } - // ✅ echte Datei existiert out.Exists = true out.Path = "/api/preview-sprite/" + url.PathEscape(id) - // Meta-Felder best-effort aus meta.json lesen if ps, ok := readPreviewSpriteMetaFromMetaFile(metaPath); ok { if ps.Count > 0 { out.Count = ps.Count @@ -327,6 +310,7 @@ func applyPreviewSpriteTruthToDoneMetaResp(id string, resp *doneMetaFileResp) { resp.PreviewSprite = previewSpriteTruthForID(id) } +// robust meta setter into RecordJob.Meta (any/string/[]byte/typed map) func metaMapFromAny(v any) map[string]any { out := map[string]any{} @@ -378,7 +362,6 @@ func metaMapFromAny(v any) map[string]any { return out default: - // best effort: unbekannten Typ in map re-hydraten b, err := json.Marshal(x) if err != nil || len(b) == 0 { return out @@ -398,7 +381,6 @@ func setStructFieldJSONMap(fv reflect.Value, m map[string]any) { return } - // JSON serialisieren (für string / []byte / typed map / struct) b, err := json.Marshal(m) if err != nil { return @@ -406,7 +388,6 @@ func setStructFieldJSONMap(fv reflect.Value, m map[string]any) { switch fv.Kind() { case reflect.Interface: - // interface{} / any -> direkt map setzen fv.Set(reflect.ValueOf(m)) return @@ -415,14 +396,12 @@ func setStructFieldJSONMap(fv reflect.Value, m map[string]any) { return case reflect.Slice: - // []byte / json.RawMessage if fv.Type().Elem().Kind() == reflect.Uint8 { fv.SetBytes(b) return } } - // Fallback: in den echten Feldtyp unmarshaln ptr := reflect.New(fv.Type()) if err := json.Unmarshal(b, ptr.Interface()); err == nil { fv.Set(ptr.Elem()) @@ -434,7 +413,6 @@ func applyPreviewSpriteTruthToRecordJobMeta(j *RecordJob) { return } - // ID aus Output ableiten (canonical: ohne HOT, ohne Ext) outPath := strings.TrimSpace(j.Output) if outPath == "" { return @@ -442,10 +420,12 @@ func applyPreviewSpriteTruthToRecordJobMeta(j *RecordJob) { base := filepath.Base(outPath) id := stripHotPrefix(strings.TrimSuffix(base, filepath.Ext(base))) id = strings.TrimSpace(id) + 
if id == "" { + return + } ps := previewSpriteTruthForID(id) - // per Reflection auf Feld "Meta" zugreifen (robust gegen Meta-Typ) rv := reflect.ValueOf(j) if rv.Kind() != reflect.Pointer || rv.IsNil() { return @@ -457,7 +437,6 @@ func applyPreviewSpriteTruthToRecordJobMeta(j *RecordJob) { fv := sv.FieldByName("Meta") if !fv.IsValid() || !fv.CanSet() { - // Falls RecordJob kein Meta-Feld hat -> nichts zu tun return } @@ -478,18 +457,12 @@ func applyPreviewSpriteTruthToRecordJobMeta(j *RecordJob) { meta = map[string]any{} } - // ✅ Legacy/Fallback Felder killen (falls vorhanden) delete(meta, "previewScrubberPath") delete(meta, "previewScrubberCount") - // ✅ previewSprite hart mit echter Dateiwahrheit überschreiben - psMap := map[string]any{ - "exists": ps.Exists, - } - + psMap := map[string]any{"exists": ps.Exists} if ps.Exists { psMap["path"] = ps.Path - if ps.Count > 0 { psMap["count"] = ps.Count } @@ -503,22 +476,21 @@ func applyPreviewSpriteTruthToRecordJobMeta(j *RecordJob) { psMap["stepSeconds"] = ps.StepSeconds } } - meta["previewSprite"] = psMap setStructFieldJSONMap(fv, meta) } +// ---------------- Handlers ---------------- + func recordList(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet { - http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodGet) { return } jobsMu.Lock() list := make([]*RecordJob, 0, len(jobs)) for _, j := range jobs { - // ✅ NEU: Hidden (und nil) nicht ausgeben -> UI sieht Probe-Jobs nicht if j == nil || j.Hidden { continue } @@ -526,31 +498,19 @@ func recordList(w http.ResponseWriter, r *http.Request) { } jobsMu.Unlock() - // optional: neueste zuerst sort.Slice(list, func(i, j int) bool { return list[i].StartedAt.After(list[j].StartedAt) }) - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(list) + respondJSON(w, list) } -func writeSSE(w http.ResponseWriter, data []byte) { - // SSE spec: 
jede Zeile mit "data:" prefixen - s := strings.ReplaceAll(string(data), "\r\n", "\n") - lines := strings.Split(s, "\n") - for _, line := range lines { - fmt.Fprintf(w, "data: %s\n", line) - } - fmt.Fprint(w, "\n") -} +// SSE (done stream) func handleDoneStream(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "text/event-stream; charset=utf-8") w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate") w.Header().Set("Connection", "keep-alive") - // wichtig für nginx / reverse proxies w.Header().Set("X-Accel-Buffering", "no") flusher, ok := w.(http.Flusher) @@ -559,12 +519,10 @@ func handleDoneStream(w http.ResponseWriter, r *http.Request) { return } - // pro client ein channel ch := make(chan []byte, 32) doneHub.add(ch) defer doneHub.remove(ch) - // ✅ KEIN doneChanged als hello – nur Kommentar fmt.Fprintf(w, ": hello seq=%d ts=%d\n\n", atomic.LoadUint64(&doneSeq), time.Now().UnixMilli()) flusher.Flush() @@ -578,7 +536,6 @@ func handleDoneStream(w http.ResponseWriter, r *http.Request) { return case <-ping.C: - // ✅ Keepalive als Kommentar (triggert keine addEventListener("doneChanged")) fmt.Fprintf(w, ": ping ts=%d\n\n", time.Now().UnixMilli()) flusher.Flush() @@ -586,7 +543,6 @@ func handleDoneStream(w http.ResponseWriter, r *http.Request) { if !ok { return } - // ✅ nur echte Changes als doneChanged fmt.Fprintf(w, "event: doneChanged\n") fmt.Fprintf(w, "data: %s\n\n", b) flusher.Flush() @@ -594,13 +550,8 @@ func handleDoneStream(w http.ResponseWriter, r *http.Request) { } } -func handleRecordVideo(w http.ResponseWriter, r *http.Request) { - recordVideo(w, r) -} - func startRecordingFromRequest(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodPost { - http.Error(w, "Nur POST erlaubt", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodPost) { return } @@ -616,12 +567,10 @@ func startRecordingFromRequest(w http.ResponseWriter, r *http.Request) { return } - w.Header().Set("Content-Type", 
"application/json") - _ = json.NewEncoder(w).Encode(job) + respondJSON(w, job) } // ---- track if headers/body were already written ---- -// (Go methods must be at package scope) type rwTrack struct { http.ResponseWriter wrote bool @@ -643,13 +592,11 @@ func (t *rwTrack) Write(p []byte) (int, error) { } // ensureMetaJSONForPlayback erzeugt generated/meta//meta.json falls sie fehlt. -// Best-effort: wenn es nicht geht (FFprobe fehlt, Fehler, etc.), wird Playback nicht verhindert. +// Best-effort: wenn es nicht geht, wird Playback nicht verhindert. func ensureMetaJSONForPlayback(ctx context.Context, videoPath string) { - // nur mp4 (nach TS-remux ist es mp4) if strings.ToLower(filepath.Ext(videoPath)) != ".mp4" { return } - videoPath = strings.TrimSpace(videoPath) if videoPath == "" { return @@ -660,33 +607,25 @@ func ensureMetaJSONForPlayback(ctx context.Context, videoPath string) { return } - // ✅ Zentrale Meta-Logik benutzen (meta.go) - // - liest vorhandene gültige meta - // - erzeugt v2-meta bei Bedarf - // - fallbackt auf duration-cache-only meta, wenn ffprobe gerade nicht geht _, _ = ensureVideoMetaForFileBestEffort(ctx, videoPath, "") } func recordVideo(w http.ResponseWriter, r *http.Request) { - // ---- wrap writer to detect "already wrote" ---- tw := &rwTrack{ResponseWriter: w} w = tw writeErr := func(code int, msg string) { - // Wenn schon Header/Body raus sind, dürfen wir KEIN http.Error mehr machen, - // sonst gibt's "superfluous response.WriteHeader". 
if tw.wrote { fmt.Println("[recordVideo] late error (headers already sent):", code, msg) return } - http.Error(w, msg, code) // nutzt WriteHeader+Write -> tw.wrote wird automatisch true + http.Error(w, msg, code) } - writeStatus := func(code int) { if tw.wrote { return } - w.WriteHeader(code) // geht durch rwTrack.WriteHeader + w.WriteHeader(code) } // ---- CORS ---- @@ -695,8 +634,6 @@ func recordVideo(w http.ResponseWriter, r *http.Request) { w.Header().Set("Access-Control-Allow-Origin", origin) w.Header().Set("Vary", "Origin") w.Header().Set("Access-Control-Allow-Methods", "GET,HEAD,OPTIONS") - // Wichtig: Browser schicken bei Video-Range-Requests oft If-Range / If-Modified-Since / If-None-Match. - // Wenn du die nicht erlaubst, schlägt der Preflight fehl -> VideoJS sieht "NETWORK error". w.Header().Set("Access-Control-Allow-Headers", "Range, If-Range, If-Modified-Since, If-None-Match") w.Header().Set("Access-Control-Expose-Headers", "Content-Length, Content-Range, Accept-Ranges, ETag, Last-Modified") w.Header().Set("Access-Control-Allow-Credentials", "true") @@ -706,113 +643,9 @@ func recordVideo(w http.ResponseWriter, r *http.Request) { return } - // ---- resolve outPath from file or id ---- - resolveOutPath := func() (string, bool) { - // ✅ Wiedergabe über Dateiname (für doneDir / recordDir) - if rawFile := strings.TrimSpace(r.URL.Query().Get("file")); rawFile != "" { - file, err := url.QueryUnescape(rawFile) - if err != nil { - writeErr(http.StatusBadRequest, "ungültiger file") - return "", false - } - file = strings.TrimSpace(file) - - // kein Pfad, keine Backslashes, kein Traversal - if file == "" || - strings.Contains(file, "/") || - strings.Contains(file, "\\") || - filepath.Base(file) != file { - writeErr(http.StatusBadRequest, "ungültiger file") - return "", false - } - - ext := strings.ToLower(filepath.Ext(file)) - if ext != ".mp4" && ext != ".ts" { - writeErr(http.StatusForbidden, "nicht erlaubt") - return "", false - } - - s := getSettings() - 
recordAbs, err := resolvePathRelativeToApp(s.RecordDir) - if err != nil { - writeErr(http.StatusInternalServerError, "recordDir auflösung fehlgeschlagen: "+err.Error()) - return "", false - } - doneAbs, err := resolvePathRelativeToApp(s.DoneDir) - if err != nil { - writeErr(http.StatusInternalServerError, "doneDir auflösung fehlgeschlagen: "+err.Error()) - return "", false - } - - // Kandidaten: erst done (inkl. 1 Level Subdir, aber ohne "keep"), - // dann keep (inkl. 1 Level Subdir), dann recordDir - names := []string{file} - if ext == ".ts" { - names = append(names, strings.TrimSuffix(file, ext)+".mp4") - } - - var outPath string - for _, name := range names { - if p, _, ok := findFileInDirOrOneLevelSubdirs(doneAbs, name, "keep"); ok { - outPath = p - break - } - if p, _, ok := findFileInDirOrOneLevelSubdirs(filepath.Join(doneAbs, "keep"), name, ""); ok { - outPath = p - break - } - if p, _, ok := findFileInDirOrOneLevelSubdirs(recordAbs, name, ""); ok { - outPath = p - break - } - } - if outPath == "" { - writeErr(http.StatusNotFound, "datei nicht gefunden") - return "", false - } - return filepath.Clean(strings.TrimSpace(outPath)), true - } - - // ✅ ALT: Wiedergabe über Job-ID (funktioniert nur solange Job im RAM existiert) - id := strings.TrimSpace(r.URL.Query().Get("id")) - if id == "" { - writeErr(http.StatusBadRequest, "id fehlt") - return "", false - } - - jobsMu.Lock() - job, ok := jobs[id] - jobsMu.Unlock() - if !ok { - writeErr(http.StatusNotFound, "job nicht gefunden") - return "", false - } - - outPath := filepath.Clean(strings.TrimSpace(job.Output)) - if outPath == "" { - writeErr(http.StatusNotFound, "output fehlt") - return "", false - } - - if !filepath.IsAbs(outPath) { - abs, err := resolvePathRelativeToApp(outPath) - if err != nil { - writeErr(http.StatusInternalServerError, "pfad auflösung fehlgeschlagen: "+err.Error()) - return "", false - } - outPath = abs - } - - fi, err := os.Stat(outPath) - if err != nil || fi.IsDir() || fi.Size() == 0 { - 
writeErr(http.StatusNotFound, "datei nicht gefunden") - return "", false - } - return outPath, true - } - - outPath, ok := resolveOutPath() + outPath, ok, code, msg := resolvePlayablePathFromQuery(r) if !ok { + writeErr(code, msg) return } @@ -830,7 +663,7 @@ func recordVideo(w http.ResponseWriter, r *http.Request) { outPath = filepath.Clean(strings.TrimSpace(newOut)) fi, err := os.Stat(outPath) - if err != nil || fi.IsDir() || fi.Size() == 0 || strings.ToLower(filepath.Ext(outPath)) != ".mp4" { + if err != nil || fi == nil || fi.IsDir() || fi.Size() == 0 || strings.ToLower(filepath.Ext(outPath)) != ".mp4" { writeErr(http.StatusInternalServerError, "Remux-Ergebnis ungültig") return } @@ -853,16 +686,14 @@ func recordVideo(w http.ResponseWriter, r *http.Request) { } } - // ✅ meta.json sicherstellen (best effort), bevor wir ausliefern ensureMetaJSONForPlayback(r.Context(), outPath) - // ✅ immer Original-Datei ausliefern (Range-fähig via serveVideoFile) w.Header().Set("Cache-Control", "no-store") serveVideoFile(w, r, outPath) } func recordStatus(w http.ResponseWriter, r *http.Request) { - id := r.URL.Query().Get("id") + id := q(r, "id") if id == "" { http.Error(w, "id fehlt", http.StatusBadRequest) return @@ -878,17 +709,15 @@ func recordStatus(w http.ResponseWriter, r *http.Request) { } applyPreviewSpriteTruthToRecordJobMeta(job) - - json.NewEncoder(w).Encode(job) + respondJSON(w, job) } func recordStop(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodPost { - http.Error(w, "Nur POST", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodPost) { return } - id := r.URL.Query().Get("id") + id := q(r, "id") jobsMu.Lock() job, ok := jobs[id] @@ -899,16 +728,64 @@ func recordStop(w http.ResponseWriter, r *http.Request) { } stopJobsInternal([]*RecordJob{job}) - - w.Header().Set("Content-Type", "application/json") - _ = json.NewEncoder(w).Encode(job) + respondJSON(w, job) } +// ---------------- Done index cache ---------------- + +type 
doneIndexItem struct { + job *RecordJob + endedAt time.Time + fileSort string + fromKeep bool + modelKey string // lower +} + +type doneIndexCache struct { + mu sync.Mutex + builtAt time.Time + seq uint64 + doneAbs string + + items []doneIndexItem + sortedIdx map[string][]int // key: "|" +} + +var doneCache doneIndexCache + +func normalizeQueryModel(raw string) string { + s := strings.TrimSpace(raw) + if s == "" { + return "" + } + s = strings.TrimPrefix(s, "http://") + s = strings.TrimPrefix(s, "https://") + + if strings.Contains(s, "/") { + parts := strings.Split(s, "/") + for i := len(parts) - 1; i >= 0; i-- { + p := strings.TrimSpace(parts[i]) + if p != "" { + s = p + break + } + } + } + if strings.Contains(s, ":") { + parts := strings.Split(s, ":") + s = strings.TrimSpace(parts[len(parts)-1]) + } + + s = strings.TrimPrefix(s, "@") + return strings.ToLower(strings.TrimSpace(s)) +} + +// buildDoneIndex: identical logic as your previous record_handlers.go (indexing done + keep) func buildDoneIndex(doneAbs string) ([]doneIndexItem, map[string][]int) { items := make([]doneIndexItem, 0, 2048) sortedIdx := make(map[string][]int) - isTrashPath := func(full string) bool { + isTrashPathLocal := func(full string) bool { p := strings.ToLower(filepath.ToSlash(strings.TrimSpace(full))) return strings.Contains(p, "/.trash/") || strings.HasSuffix(p, "/.trash") } @@ -917,7 +794,7 @@ func buildDoneIndex(doneAbs string) ([]doneIndexItem, map[string][]int) { if fi == nil || fi.IsDir() || fi.Size() == 0 { return } - if isTrashPath(full) { + if isTrashPathLocal(full) { return } @@ -947,10 +824,9 @@ func buildDoneIndex(doneAbs string) ([]doneIndexItem, map[string][]int) { start = time.Date(yy, time.Month(mm), dd, hh, mi, ss, 0, time.Local) } - // modelKey (lower) – nutze deine bestehende Logik + // modelKey (lower) mk := strings.ToLower(strings.TrimSpace(modelKeyFromFilenameOrPath(name, full, doneAbs))) if mk == "" { - // fallback: parent dir (skip keep) parent := 
strings.ToLower(strings.TrimSpace(filepath.Base(filepath.Dir(full)))) if parent != "" && parent != "keep" { mk = parent @@ -961,7 +837,7 @@ func buildDoneIndex(doneAbs string) ([]doneIndexItem, map[string][]int) { fs := strings.ToLower(name) fs = strings.TrimPrefix(fs, "hot ") - // duration + srcURL (wie bei dir: meta.json, dann cache-only) + // duration + srcURL dur := 0.0 srcURL := "" @@ -1043,11 +919,9 @@ func buildDoneIndex(doneAbs string) ([]doneIndexItem, map[string][]int) { // done (ohne keep) scanDir(doneAbs, true) - // keep (optional im Index, damit includeKeep schnell ist) + // keep scanDir(filepath.Join(doneAbs, "keep"), false) - // Pre-sort für häufigen Fall: includeKeep true/false und die Sort-Modes - // (nur wenn KEIN model-Filter genutzt wird) mkSorted := func(includeKeep bool, sortMode string) []int { idx := make([]int, 0, len(items)) for i := range items { @@ -1166,81 +1040,19 @@ func buildDoneIndex(doneAbs string) ([]doneIndexItem, map[string][]int) { return items, sortedIdx } -// ⬆️ Ergänze im Import-Block (falls noch nicht drin): -// import "sync" - -type doneIndexItem struct { - job *RecordJob - endedAt time.Time - fileSort string - fromKeep bool - modelKey string // lower -} - -type doneIndexCache struct { - mu sync.Mutex - builtAt time.Time - seq uint64 - doneAbs string - - items []doneIndexItem - sortedIdx map[string][]int // key: "|" -} - -var doneCache doneIndexCache - -func normalizeQueryModel(raw string) string { - s := strings.TrimSpace(raw) - if s == "" { - return "" - } - s = strings.TrimPrefix(s, "http://") - s = strings.TrimPrefix(s, "https://") - - // letzter URL-Segment, falls jemand ".../modelname" übergibt - if strings.Contains(s, "/") { - parts := strings.Split(s, "/") - for i := len(parts) - 1; i >= 0; i-- { - p := strings.TrimSpace(parts[i]) - if p != "" { - s = p - break - } - } - } - // falls "host:model" übergeben wird - if strings.Contains(s, ":") { - parts := strings.Split(s, ":") - s = 
strings.TrimSpace(parts[len(parts)-1]) - } - - s = strings.TrimPrefix(s, "@") - return strings.ToLower(strings.TrimSpace(s)) -} +// ---------------- Done meta + list ---------------- func recordDoneMeta(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet { - http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodGet) { return } - // ✅ NEW: File-Mode: /api/record/done/meta?file=XYZ.mp4 - if raw := strings.TrimSpace(r.URL.Query().Get("file")); raw != "" { - file, err := url.QueryUnescape(raw) - if err != nil { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - file = strings.TrimSpace(file) - - // nur Basename erlauben (kein Traversal) - if !isSafeBasename(file) { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - - ext := strings.ToLower(filepath.Ext(file)) - if ext != ".mp4" && ext != ".ts" { + // File-Mode: /api/record/done/meta?file=XYZ.mp4 + if file, ok, err := safeBasenameQuery(r, "file"); err != nil { + http.Error(w, "ungültiger file", http.StatusBadRequest) + return + } else if ok { + if !isAllowedVideoExt(file) { http.Error(w, "nicht erlaubt", http.StatusForbidden) return } @@ -1256,14 +1068,12 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { return } - // Datei in done/ oder keep/ finden full, _, fi, err := resolveDoneFileByName(doneAbs, file) if err != nil || fi == nil || fi.IsDir() || fi.Size() == 0 { http.Error(w, "datei nicht gefunden", http.StatusNotFound) return } - // optional: TS -> MP4 remux (meta soll sich auf abspielbare MP4 beziehen) outPath := filepath.Clean(strings.TrimSpace(full)) if strings.ToLower(filepath.Ext(outPath)) == ".ts" { if newOut, rerr := maybeRemuxTS(outPath); rerr == nil && strings.TrimSpace(newOut) != "" { @@ -1274,15 +1084,11 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { } } - // ✅ best-effort meta.json erzeugen ensureMetaJSONForPlayback(r.Context(), outPath) resp := 
doneMetaFileResp{File: filepath.Base(outPath)} - // meta lesen (wenn vorhanden) id := stripHotPrefix(strings.TrimSuffix(filepath.Base(outPath), filepath.Ext(outPath))) - - // ✅ Preview-Sprite-Truth immer setzen (explizit true/false) applyPreviewSpriteTruthToDoneMetaResp(id, &resp) if strings.TrimSpace(id) != "" { @@ -1302,7 +1108,6 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { } } - // fallback: wenn Meta existiert aber Duration fehlt -> zentralen Cache/ffprobe nutzen if resp.DurationSeconds <= 0 { pctx, cancel := context.WithTimeout(r.Context(), 5*time.Second) defer cancel() @@ -1311,21 +1116,13 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { } } - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(resp) + respondJSON(w, resp) return } - // --------------------- - // ✅ ORIGINAL: Count-Mode (wie vorher) - // --------------------- - - // optional: includeKeep (falls du später mal brauchst) + // Count-Mode qKeep := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("includeKeep"))) includeKeep := qKeep == "1" || qKeep == "true" || qKeep == "yes" - - // optional: model filter (falls du später mal brauchst) qModel := normalizeQueryModel(r.URL.Query().Get("model")) s := getSettings() @@ -1338,7 +1135,6 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { curSeq := atomic.LoadUint64(&doneSeq) now := time.Now() - // Cache rebuild (wie in recordDoneList; Count kommt aus Index) doneCache.mu.Lock() needRebuild := doneCache.seq != curSeq || doneCache.doneAbs != doneAbs || @@ -1375,7 +1171,6 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { sortedAll := doneCache.sortedIdx doneCache.mu.Unlock() - // Count bestimmen count := 0 if qModel == "" { incKey := "0" @@ -1394,25 +1189,18 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { } } - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", 
"no-store") - _ = json.NewEncoder(w).Encode(doneMetaResp{Count: count}) + respondJSON(w, doneMetaResp{Count: count}) } func recordDoneList(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet { - http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodGet) { return } - // ✅ optional: auch /done/keep/ einbeziehen (Standard: false) qKeep := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("includeKeep"))) includeKeep := qKeep == "1" || qKeep == "true" || qKeep == "yes" - - // ✅ NEU: optionaler Model-Filter (Pagination dann "pro Model" sinnvoll) qModel := normalizeQueryModel(r.URL.Query().Get("model")) - // optional: Pagination (1-based). Wenn page/pageSize fehlen -> wie vorher: komplette Liste page := 0 pageSize := 0 if v := strings.TrimSpace(r.URL.Query().Get("page")); v != "" { @@ -1426,14 +1214,10 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { } } - // optional: Sort - // supported: completed_(asc|desc), model_(asc|desc), file_(asc|desc), duration_(asc|desc), size_(asc|desc) sortMode := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("sort"))) if sortMode == "" { sortMode = "completed_desc" } - - // ⚠️ Backwards-Compat: alte model_* Sorts auf file_* mappen if sortMode == "model_asc" { sortMode = "file_asc" } @@ -1441,7 +1225,6 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { sortMode = "file_desc" } - // ✅ all=1 -> immer komplette Liste zurückgeben (Pagination deaktivieren) qAll := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("all"))) fetchAll := qAll == "1" || qAll == "true" || qAll == "yes" if fetchAll { @@ -1449,35 +1232,32 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { pageSize = 0 } - // ✅ optional: count mitsenden qWithCount := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("withCount"))) withCount := qWithCount == "1" || qWithCount == "true" || qWithCount == "yes" - durationForSort := func(j *RecordJob) (sec float64, ok bool) { 
- if j.DurationSeconds > 0 { - return j.DurationSeconds, true - } - return 0, false - } - compareIdx := func(items []doneIndexItem, sortMode string, ia, ib int) bool { a := items[ia] b := items[ib] ta, tb := a.endedAt, b.endedAt + durationForSort := func(j *RecordJob) (sec float64, ok bool) { + if j.DurationSeconds > 0 { + return j.DurationSeconds, true + } + return 0, false + } + switch sortMode { case "completed_asc": if !ta.Equal(tb) { return ta.Before(tb) } return a.fileSort < b.fileSort - case "completed_desc": if !ta.Equal(tb) { return ta.After(tb) } return a.fileSort < b.fileSort - case "file_asc": if a.fileSort != b.fileSort { return a.fileSort < b.fileSort @@ -1486,7 +1266,6 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { return ta.After(tb) } return a.fileSort < b.fileSort - case "file_desc": if a.fileSort != b.fileSort { return a.fileSort > b.fileSort @@ -1495,12 +1274,11 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { return ta.After(tb) } return a.fileSort < b.fileSort - case "duration_asc": da, okA := durationForSort(a.job) db, okB := durationForSort(b.job) if okA != okB { - return okA // unknown nach hinten + return okA } if okA && okB && da != db { return da < db @@ -1509,7 +1287,6 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { return ta.After(tb) } return a.fileSort < b.fileSort - case "duration_desc": da, okA := durationForSort(a.job) db, okB := durationForSort(b.job) @@ -1523,7 +1300,6 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { return ta.After(tb) } return a.fileSort < b.fileSort - case "size_asc": if a.job.SizeBytes != b.job.SizeBytes { return a.job.SizeBytes < b.job.SizeBytes @@ -1532,7 +1308,6 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { return ta.After(tb) } return a.fileSort < b.fileSort - case "size_desc": if a.job.SizeBytes != b.job.SizeBytes { return a.job.SizeBytes > b.job.SizeBytes @@ -1541,7 +1316,6 @@ func recordDoneList(w 
http.ResponseWriter, r *http.Request) { return ta.After(tb) } return a.fileSort < b.fileSort - default: if !ta.Equal(tb) { return ta.After(tb) @@ -1550,7 +1324,6 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { } } - // --- resolve done path --- s := getSettings() doneAbs, err := resolvePathRelativeToApp(s.DoneDir) if err != nil { @@ -1558,20 +1331,11 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { return } - // Wenn kein DoneDir gesetzt ist → einfach leere Liste zurückgeben if strings.TrimSpace(doneAbs) == "" { - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(doneListResponse{ - Items: []*RecordJob{}, - TotalCount: 0, - Page: page, - PageSize: pageSize, - }) + respondJSON(w, doneListResponse{Items: []*RecordJob{}, TotalCount: 0, Page: page, PageSize: pageSize}) return } - // rebuild wenn doneSeq geändert oder TTL curSeq := atomic.LoadUint64(&doneSeq) now := time.Now() @@ -1581,7 +1345,6 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { now.Sub(doneCache.builtAt) > 30*time.Second if needRebuild { - // Wenn doneAbs nicht existiert: leere Daten im Cache if _, err := os.Stat(doneAbs); err != nil && os.IsNotExist(err) { doneCache.items = nil doneCache.sortedIdx = make(map[string][]int, 16) @@ -1612,27 +1375,21 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { sortedAll := doneCache.sortedIdx doneCache.mu.Unlock() - // --------- Request-spezifische Auswahl (Model-Filter, includeKeep, sort, paging) --------- - incKey := "0" if includeKeep { incKey = "1" } - // idx enthält indices in items var idx []int - if qModel == "" { idx = sortedAll[incKey+"|"+sortMode] if idx == nil { - // fallback idx = sortedAll[incKey+"|completed_desc"] if idx == nil { idx = make([]int, 0) } } } else { - // Model-Filter: nur Teilmenge, dann sortieren idx = make([]int, 0, 256) for i := range items { if !includeKeep && items[i].fromKeep { @@ -1649,10 
+1406,8 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { totalCount := len(idx) - // Pagination anwenden (nur auf idx) start := 0 end := totalCount - if pageSize > 0 && !fetchAll { if page <= 0 { page = 1 @@ -1670,7 +1425,6 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { } } - // Response jobs bauen out := make([]*RecordJob, 0, max(0, end-start)) for _, ii := range idx[start:end] { @@ -1679,15 +1433,12 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { continue } - // ✅ Kopie erzeugen (wichtig: keine Race/Mutations am Cache-Objekt) c := *base - // Size immer korrekt setzen if fi, err := os.Stat(c.Output); err == nil && fi != nil && !fi.IsDir() && fi.Size() > 0 { c.SizeBytes = fi.Size() } - // Meta nur lesen, wenn es existiert (kein Generieren!) id := stripHotPrefix(strings.TrimSuffix(filepath.Base(c.Output), filepath.Ext(c.Output))) if id != "" { if mp, err := generatedMetaFile(id); err == nil { @@ -1705,34 +1456,18 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { } } - // ✅ Preview-Sprite-Truth im LIST-Payload erzwingen (wichtig für Cards/Gallery) applyPreviewSpriteTruthToRecordJobMeta(&c) - out = append(out, &c) } - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - - // ✅ Wenn Frontend "withCount=1" nutzt: {count, items} if withCount { - _ = json.NewEncoder(w).Encode(map[string]any{ - "count": totalCount, - "items": out, - }) + respondJSON(w, map[string]any{"count": totalCount, "items": out}) return } - // ✅ Standard-Response: immer auch totalCount mitsenden - _ = json.NewEncoder(w).Encode(doneListResponse{ - Items: out, - TotalCount: totalCount, - Page: page, - PageSize: pageSize, - }) + respondJSON(w, doneListResponse{Items: out, TotalCount: totalCount, Page: page, PageSize: pageSize}) } -// mini helper, falls du keinen hast func max(a, b int) int { if a > b { return a @@ -1740,8 +1475,9 @@ func max(a, b int) int { return b } +// ---------------- File 
operations (delete/undo/keep/hot) ---------------- + func renameWithRetryAggressive(src, dst string) error { - // Mehrere kurze Versuche + leichtes Backoff var lastErr error delays := []time.Duration{ 80 * time.Millisecond, @@ -1757,13 +1493,10 @@ func renameWithRetryAggressive(src, dst string) error { return nil } else { lastErr = err - // nur bei Windows SharingViolation lohnt Retry wirklich if runtime.GOOS != "windows" || !isSharingViolation(err) { return err } } - - // Vor letztem Sleep nicht mehr warten if i < len(delays)-1 { time.Sleep(d) } @@ -1773,37 +1506,17 @@ func renameWithRetryAggressive(src, dst string) error { } func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { - // Frontend nutzt aktuell POST (siehe FinishedDownloads), daher erlauben wir POST + DELETE if r.Method != http.MethodPost && r.Method != http.MethodDelete { http.Error(w, "Nur POST oder DELETE erlaubt", http.StatusMethodNotAllowed) return } - raw := strings.TrimSpace(r.URL.Query().Get("file")) - if raw == "" { - http.Error(w, "file fehlt", http.StatusBadRequest) + file, ok, err := safeBasenameQuery(r, "file") + if err != nil || !ok { + http.Error(w, "file fehlt/ungültig", http.StatusBadRequest) return } - - // sicher decoden - file, err := url.QueryUnescape(raw) - if err != nil { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - file = strings.TrimSpace(file) - - // ✅ nur Basename erlauben (keine Unterordner, kein Traversal) - if file == "" || - strings.Contains(file, "/") || - strings.Contains(file, "\\") || - filepath.Base(file) != file { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - - ext := strings.ToLower(filepath.Ext(file)) - if ext != ".mp4" && ext != ".ts" { + if !isAllowedVideoExt(file) { http.Error(w, "nicht erlaubt", http.StatusForbidden) return } @@ -1819,7 +1532,6 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ done + done/ sowie keep + keep/ target, from, fi, err := 
resolveDoneFileByName(doneAbs, file) if err != nil { http.Error(w, "datei nicht gefunden", http.StatusNotFound) @@ -1830,14 +1542,10 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ Single-slot Trash: immer nur die *zuletzt* gelöschte Datei erlauben trashDir := filepath.Join(doneAbs, ".trash") - // ✅ Wenn im Single-slot Trash schon was liegt: ID merken, - // aber generated erst löschen, NACHDEM .trash wirklich erfolgreich geleert wurde. prevBase := "" prevCanonical := "" - if b, err := os.ReadFile(filepath.Join(trashDir, "last.json")); err == nil && len(b) > 0 { var prev struct { File string `json:"file"` @@ -1851,8 +1559,6 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { } } - // Trash komplett leeren => ältere Undos sind automatisch ungültig - // ⚠️ Fehler NICHT schlucken: wenn .trash nicht leerbar ist, darf der neue Delete nicht weiterlaufen. if err := os.RemoveAll(trashDir); err != nil { if runtime.GOOS == "windows" && isSharingViolation(err) { http.Error(w, "konnte .trash nicht leeren (Datei wird gerade verwendet). Bitte Player schließen und erneut versuchen.", http.StatusConflict) @@ -1862,11 +1568,8 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ Jetzt ist das alte Trash-Video wirklich endgültig weg → generated/meta// entfernen. if prevCanonical != "" { removeGeneratedForID(prevCanonical) - - // Best-effort: falls irgendwo mal Assets mit HOT-ID entstanden sind if prevBase != "" && prevBase != prevCanonical { removeGeneratedForID(prevBase) } @@ -1877,7 +1580,6 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { return } - // Original-Dir relativ zu doneAbs merken (inkl. keep/ oder ) origDir := filepath.Dir(target) relDir, err := filepath.Rel(doneAbs, origDir) if err != nil { @@ -1889,10 +1591,9 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { relDir = "." 
} - // ✅ Undo-Token jetzt schon erzeugen, damit wir es als "Single-slot key" speichern können tok, err := encodeUndoDeleteToken(undoDeleteToken{ - Trash: "", // setzen wir gleich (trashName) - RelDir: relDir, // hast du oben schon berechnet + Trash: "", + RelDir: relDir, File: file, }) if err != nil { @@ -1900,14 +1601,10 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { return } - trashName := tok + "__" + file // eindeutig + Token sichtbar in filename + trashName := tok + "__" + file trashName = strings.ReplaceAll(trashName, string(os.PathSeparator), "_") dst := filepath.Join(trashDir, trashName) - // ✅ Token muss auch wissen, wie der Trashname heißt - // (wir encoden den Token nicht neu — wir speichern Trashname separat in last.json) - - // move mit retry (Windows file-lock robust) if err := renameWithRetryAggressive(target, dst); err != nil { if runtime.GOOS == "windows" && isSharingViolation(err) { http.Error(w, "datei wird gerade verwendet (Player offen). Bitte kurz stoppen und erneut versuchen.", http.StatusConflict) @@ -1917,12 +1614,11 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ last.json schreiben: nur dieser Token ist gültig type trashMeta struct { - Token string `json:"token"` // exakt der Query-Token (encoded) - TrashName string `json:"trashName"` // Dateiname in .trash - RelDir string `json:"relDir"` // ursprünglicher Ordner relativ zu doneAbs - File string `json:"file"` // originaler Name (basename) + Token string `json:"token"` + TrashName string `json:"trashName"` + RelDir string `json:"relDir"` + File string `json:"file"` DeletedAt int64 `json:"deletedAt"` } @@ -1937,28 +1633,23 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) { b, _ := json.Marshal(meta) _ = os.WriteFile(filepath.Join(trashDir, "last.json"), b, 0o644) - // Cache/Jobs aufräumen (Assets NICHT hart löschen => Undo bleibt “schnell” möglich) purgeDurationCacheForPath(target) removeJobsByOutputBasename(file) 
notifyDoneChanged() notifyJobsChanged() - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(map[string]any{ + respondJSON(w, map[string]any{ "ok": true, "file": file, - "from": from, // "done" | "keep" - "undoToken": tok, // ✅ für Undo + "from": from, + "undoToken": tok, "trashed": true, }) - } func recordRestoreVideo(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodPost { - http.Error(w, "Nur POST erlaubt", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodPost) { return } @@ -1968,7 +1659,6 @@ func recordRestoreVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ doneDir auflösen s := getSettings() doneAbs, err := resolvePathRelativeToApp(s.DoneDir) if err != nil { @@ -1980,7 +1670,6 @@ func recordRestoreVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ Single-slot: last.json lesen und Token strikt validieren trashDir := filepath.Join(doneAbs, ".trash") metaPath := filepath.Join(trashDir, "last.json") @@ -2006,41 +1695,33 @@ func recordRestoreVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ Nur der letzte Token ist gültig if raw != meta.Token { http.Error(w, "token ungültig (nicht der letzte)", http.StatusNotFound) return } - // ✅ Token zusätzlich decoden (Format/Signatur prüfen, aber Restore-Daten kommen aus last.json) tok, err := decodeUndoDeleteToken(raw) if err != nil { http.Error(w, "token ungültig", http.StatusBadRequest) return } - // ✅ Safety: nur sichere Pfad-Bestandteile aus meta verwenden if !isSafeBasename(meta.TrashName) || !isSafeBasename(meta.File) || !isSafeRelDir(meta.RelDir) { http.Error(w, "token inhalt ungültig", http.StatusBadRequest) return } - - // ✅ Extra Konsistenzchecks: token.File / token.RelDir müssen zu meta passen (optional aber sinnvoll) if tok.File != meta.File || tok.RelDir != meta.RelDir { http.Error(w, "token passt nicht zu letzter Löschung", http.StatusNotFound) return } - ext 
:= strings.ToLower(filepath.Ext(meta.File)) - if ext != ".mp4" && ext != ".ts" { + if !isAllowedVideoExt(meta.File) { http.Error(w, "nicht erlaubt", http.StatusForbidden) return } - // Quelle: exakt die zuletzt gelöschte Datei src := filepath.Join(trashDir, meta.TrashName) - // Zielordner rekonstruieren (relativ zu doneAbs) rel := meta.RelDir if rel == "." { rel = "" @@ -2049,7 +1730,6 @@ func recordRestoreVideo(w http.ResponseWriter, r *http.Request) { dstDirClean := filepath.Clean(dstDir) doneClean := filepath.Clean(doneAbs) - // safety: dstDir muss innerhalb doneAbs liegen if !strings.HasPrefix(strings.ToLower(dstDirClean)+string(os.PathSeparator), strings.ToLower(doneClean)+string(os.PathSeparator)) && !strings.EqualFold(dstDirClean, doneClean) { http.Error(w, "zielpfad ungültig", http.StatusBadRequest) @@ -2076,55 +1756,35 @@ func recordRestoreVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ Restore soll im Card-Stack "oben" erscheinen (Sort: completed_desc) - // Dafür ModTime auf "jetzt" setzen, weil buildDoneIndex() endedAt aus fi.ModTime() nimmt. 
now := time.Now() - _ = os.Chtimes(dst, now, now) // best-effort + _ = os.Chtimes(dst, now, now) - // ✅ Optional: Trash leeren, damit Token danach definitiv tot ist _ = os.RemoveAll(trashDir) _ = os.MkdirAll(trashDir, 0o755) - purgeDurationCacheForPath(src) // falls src noch irgendwo gecacht wäre (optional) - purgeDurationCacheForPath(dst) // optional + purgeDurationCacheForPath(src) + purgeDurationCacheForPath(dst) notifyDoneChanged() - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(map[string]any{ + respondJSON(w, map[string]any{ "ok": true, "file": meta.File, - "restoredFile": filepath.Base(dst), // kann __dup enthalten + "restoredFile": filepath.Base(dst), }) } func recordUnkeepVideo(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodPost { - http.Error(w, "Nur POST erlaubt", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodPost) { return } - raw := strings.TrimSpace(r.URL.Query().Get("file")) - if raw == "" { - http.Error(w, "file fehlt", http.StatusBadRequest) + file, ok, err := safeBasenameQuery(r, "file") + if err != nil || !ok { + http.Error(w, "file fehlt/ungültig", http.StatusBadRequest) return } - - file, err := url.QueryUnescape(raw) - if err != nil { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - file = strings.TrimSpace(file) - - if !isSafeBasename(file) { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - - ext := strings.ToLower(filepath.Ext(file)) - if ext != ".mp4" && ext != ".ts" { + if !isAllowedVideoExt(file) { http.Error(w, "nicht erlaubt", http.StatusForbidden) return } @@ -2140,7 +1800,6 @@ func recordUnkeepVideo(w http.ResponseWriter, r *http.Request) { return } - // Quelle muss in keep (root oder keep/) liegen src, from, fi, err := resolveDoneFileByName(doneAbs, file) if err != nil { http.Error(w, "datei nicht gefunden", http.StatusNotFound) @@ -2155,9 +1814,7 @@ func 
recordUnkeepVideo(w http.ResponseWriter, r *http.Request) { return } - // Ziel: zurück nach done/ (flach, ohne model-subdirs) dstDir := doneAbs - if err := os.MkdirAll(dstDir, 0o755); err != nil { http.Error(w, "done subdir erstellen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError) return @@ -2180,9 +1837,7 @@ func recordUnkeepVideo(w http.ResponseWriter, r *http.Request) { notifyDoneChanged() - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(map[string]any{ + respondJSON(w, map[string]any{ "ok": true, "oldFile": file, "newFile": filepath.Base(dst), @@ -2190,35 +1845,16 @@ func recordUnkeepVideo(w http.ResponseWriter, r *http.Request) { } func recordKeepVideo(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodPost { - http.Error(w, "Nur POST erlaubt", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodPost) { return } - raw := strings.TrimSpace(r.URL.Query().Get("file")) - if raw == "" { - http.Error(w, "file fehlt", http.StatusBadRequest) + file, ok, err := safeBasenameQuery(r, "file") + if err != nil || !ok { + http.Error(w, "file fehlt/ungültig", http.StatusBadRequest) return } - - file, err := url.QueryUnescape(raw) - if err != nil { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - file = strings.TrimSpace(file) - - // ✅ nur Basename erlauben - if file == "" || - strings.Contains(file, "/") || - strings.Contains(file, "\\") || - filepath.Base(file) != file { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - - ext := strings.ToLower(filepath.Ext(file)) - if ext != ".mp4" && ext != ".ts" { + if !isAllowedVideoExt(file) { http.Error(w, "nicht erlaubt", http.StatusForbidden) return } @@ -2240,36 +1876,27 @@ func recordKeepVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ 0) Wenn schon irgendwo in keep (root oder keep/) existiert: - // - wenn im keep-root: jetzt nach keep// 
nachziehen + // already in keep? if p, _, ok := findFileInDirOrOneLevelSubdirs(keepRoot, file, ""); ok { - // p liegt entweder in keepRoot oder keepRoot/ if strings.EqualFold(filepath.Clean(filepath.Dir(p)), filepath.Clean(keepRoot)) { - // im Root => versuchen einzusortieren - modelKey := modelKeyFromFilenameOrPath(file, p /* srcPath */, keepRoot /* doneAbs dummy, wird nicht genutzt */) + modelKey := modelKeyFromFilenameOrPath(file, p, keepRoot) modelKey = sanitizeModelKey(modelKey) - - // Optionaler Fallback: wenn wir aus dem keep-root Pfad nix ziehen können, nur aus Filename: if modelKey == "" { stem := strings.TrimSuffix(file, filepath.Ext(file)) modelKey = sanitizeModelKey(modelNameFromFilename(stem)) } - if modelKey != "" { dstDir := filepath.Join(keepRoot, modelKey) if err := os.MkdirAll(dstDir, 0o755); err == nil { dst, derr := uniqueDestPath(dstDir, file) if derr == nil { - // best-effort move _ = renameWithRetry(p, dst) } } } } - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(map[string]any{ + respondJSON(w, map[string]any{ "ok": true, "file": file, "alreadyKept": true, @@ -2277,9 +1904,8 @@ func recordKeepVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ 1) Quelle in done (root oder done/), aber NICHT aus keep - src, fi, ok := findFileInDirOrOneLevelSubdirs(doneAbs, file, "keep") - if !ok { + src, fi, ok2 := findFileInDirOrOneLevelSubdirs(doneAbs, file, "keep") + if !ok2 { http.Error(w, "datei nicht gefunden", http.StatusNotFound) return } @@ -2288,7 +1914,6 @@ func recordKeepVideo(w http.ResponseWriter, r *http.Request) { return } - // ✅ 2) Ziel: keep//file modelKey := modelKeyFromFilenameOrPath(file, src, doneAbs) dstDir := keepRoot if modelKey != "" { @@ -2306,7 +1931,6 @@ func recordKeepVideo(w http.ResponseWriter, r *http.Request) { return } - // rename mit retry (Windows file-lock) if err := renameWithRetryAggressive(src, dst); err != nil { if 
runtime.GOOS == "windows" && isSharingViolation(err) { http.Error(w, "keep fehlgeschlagen (Datei wird gerade verwendet).", http.StatusConflict) @@ -2318,49 +1942,25 @@ func recordKeepVideo(w http.ResponseWriter, r *http.Request) { notifyDoneChanged() - // ... dein bestehender Cleanup-Block (generated Assets löschen, legacy cleanup, removeJobsByOutputBasename) bleibt unverändert ... - - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(map[string]any{ + respondJSON(w, map[string]any{ "ok": true, "file": file, "alreadyKept": false, - "newFile": filepath.Base(dst), // ✅ NEU + "newFile": filepath.Base(dst), }) - } func recordToggleHot(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodPost { - http.Error(w, "Nur POST", http.StatusMethodNotAllowed) + if !mustMethod(w, r, http.MethodPost) { return } - raw := strings.TrimSpace(r.URL.Query().Get("file")) - if raw == "" { - http.Error(w, "file fehlt", http.StatusBadRequest) + file, ok, err := safeBasenameQuery(r, "file") + if err != nil || !ok { + http.Error(w, "file fehlt/ungültig", http.StatusBadRequest) return } - - file, err := url.QueryUnescape(raw) - if err != nil { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - file = strings.TrimSpace(file) - - // ✅ nur Basename erlauben - if file == "" || - strings.Contains(file, "/") || - strings.Contains(file, "\\") || - filepath.Base(file) != file { - http.Error(w, "ungültiger file", http.StatusBadRequest) - return - } - - ext := strings.ToLower(filepath.Ext(file)) - if ext != ".mp4" && ext != ".ts" { + if !isAllowedVideoExt(file) { http.Error(w, "nicht erlaubt", http.StatusForbidden) return } @@ -2376,7 +1976,7 @@ func recordToggleHot(w http.ResponseWriter, r *http.Request) { return } - // ✅ Quelle kann in done/, done/, keep/, keep/ liegen + // Quelle kann in done/ oder keep/ liegen src, from, fi, err := resolveDoneFileByName(doneAbs, file) if err != nil { 
http.Error(w, "datei nicht gefunden", http.StatusNotFound) @@ -2387,9 +1987,8 @@ func recordToggleHot(w http.ResponseWriter, r *http.Request) { return } - srcDir := filepath.Dir(src) // ✅ wichtig: toggeln im tatsächlichen Ordner + srcDir := filepath.Dir(src) - // toggle: HOT Prefix newFile := file if strings.HasPrefix(file, "HOT ") { newFile = strings.TrimPrefix(file, "HOT ") @@ -2397,7 +1996,7 @@ func recordToggleHot(w http.ResponseWriter, r *http.Request) { newFile = "HOT " + file } - dst := filepath.Join(srcDir, newFile) // ✅ im selben Ordner toggeln (done oder keep) + dst := filepath.Join(srcDir, newFile) if _, err := os.Stat(dst); err == nil { http.Error(w, "ziel existiert bereits", http.StatusConflict) return @@ -2415,8 +2014,6 @@ func recordToggleHot(w http.ResponseWriter, r *http.Request) { return } - // ✅ KEIN generated-rename! - // Assets bleiben canonical (ohne HOT) canonicalID := stripHotPrefix(strings.TrimSuffix(file, filepath.Ext(file))) renameJobsOutputBasename(file, newFile) @@ -2424,13 +2021,11 @@ func recordToggleHot(w http.ResponseWriter, r *http.Request) { notifyDoneChanged() notifyJobsChanged() - w.Header().Set("Content-Type", "application/json") - w.Header().Set("Cache-Control", "no-store") - _ = json.NewEncoder(w).Encode(map[string]any{ + respondJSON(w, map[string]any{ "ok": true, "oldFile": file, "newFile": newFile, "canonicalID": canonicalID, - "from": from, // "done" | "keep" + "from": from, }) } diff --git a/backend/record_job_progress.go b/backend/record_job_progress.go deleted file mode 100644 index 8a33b31..0000000 --- a/backend/record_job_progress.go +++ /dev/null @@ -1,105 +0,0 @@ -package main - -import ( - "math" - "strings" -) - -func setJobProgress(job *RecordJob, phase string, pct int) { - phase = strings.TrimSpace(phase) - phaseLower := strings.ToLower(phase) - - // clamp pct 0..100 - if pct < 0 { - pct = 0 - } - if pct > 100 { - pct = 100 - } - - // "globale" Zielbereiche pro Phase (dein Pipeline-Modell) - // postwork wartet: 
70..72 - // remuxing: 72..78 - // moving: 78..84 - // probe: 84..86 - // assets: 86..99 - type rng struct{ start, end int } - rangeFor := func(ph string) rng { - switch ph { - case "postwork": - return rng{0, 5} - case "remuxing": - return rng{5, 65} - case "moving": - return rng{65, 75} - case "probe": - return rng{75, 80} - case "assets": - return rng{80, 99} - default: - return rng{0, 100} - } - } - - jobsMu.Lock() - defer jobsMu.Unlock() - - // Sobald Postwork läuft oder Aufnahme beendet ist -> Recorder darf NICHTS mehr überschreiben. - inPostwork := job.EndedAt != nil || (strings.TrimSpace(job.Phase) != "" && strings.ToLower(strings.TrimSpace(job.Phase)) != "recording") - if inPostwork { - // harte Blockade: alte recording-Updates dürfen weder Phase noch Progress anfassen - if phaseLower == "" || phaseLower == "recording" { - return - } - } - - // Phase aktualisieren (aber nur wenn nicht leer) - if phase != "" { - job.Phase = phase - } - - // ✅ Sonderfall: "wartet auf Nachbearbeitung" => Progress bleibt 0% - // Erwartung: Caller sendet phase="postwork" und pct=0 solange nur gewartet wird. - // Muss vor "niemals rückwärts" passieren, sonst käme man von Recording-Progress nicht mehr auf 0. - if phaseLower == "postwork" && pct == 0 { - job.Progress = 0 - return - } - - // Progress-Logik: - // - wenn wir in Postwork sind und jemand phasenlokale 0..100 liefert (z.B. remuxing 25), - // mappe das in den globalen Bereich der Phase. - // - danach: niemals rückwärts. - mapped := pct - - if inPostwork { - r := rangeFor(phaseLower) - if r.end >= r.start { - // Heuristik: - // - Wenn pct bereits im globalen Bereich der Phase liegt => als global interpretieren, clampen. - // - Sonst => als lokales 0..100 interpretieren und in [start..end] mappen. 
- if pct >= r.start && pct <= r.end { - // schon global - mapped = pct - } else { - // lokal 0..100 -> global - width := float64(r.end - r.start) - mapped = r.start + int(math.Round((float64(pct)/100.0)*width)) - } - - // clamp in den Bereich - if mapped < r.start { - mapped = r.start - } - if mapped > r.end { - mapped = r.end - } - } - } - - // niemals rückwärts - if mapped < job.Progress { - mapped = job.Progress - } - job.Progress = mapped -} diff --git a/backend/record_helpers_paths.go b/backend/record_paths.go similarity index 50% rename from backend/record_helpers_paths.go rename to backend/record_paths.go index 946bd7b..a3655ac 100644 --- a/backend/record_helpers_paths.go +++ b/backend/record_paths.go @@ -1,15 +1,90 @@ -// backend\record_helpers_paths.go - +// backend/record_paths.go package main import ( "fmt" "net/http" + "net/url" "os" + "path" "path/filepath" "strings" ) +// ---------- Basic query helpers ---------- + +func q(r *http.Request, key string) string { + return strings.TrimSpace(r.URL.Query().Get(key)) +} + +// file query -> safe basename (no traversal) + url decode +func safeBasenameQuery(r *http.Request, key string) (string, bool, error) { + raw := strings.TrimSpace(r.URL.Query().Get(key)) + if raw == "" { + return "", false, nil + } + dec, err := url.QueryUnescape(raw) + if err != nil { + return "", false, err + } + dec = strings.TrimSpace(dec) + if !isSafeBasename(dec) { + return "", false, fmt.Errorf("invalid basename") + } + return dec, true, nil +} + +func isAllowedVideoExt(name string) bool { + ext := strings.ToLower(filepath.Ext(name)) + return ext == ".mp4" || ext == ".ts" +} + +// ---------- Safe path pieces ---------- + +func isSafeRelDir(rel string) bool { + rel = strings.TrimSpace(rel) + if rel == "" { + return false + } + // normalize to slash for validation + rel = filepath.ToSlash(rel) + if strings.HasPrefix(rel, "/") { + return false + } + clean := path.Clean(rel) // path.Clean => forward slashes + if clean == "." 
{ + return true + } + if strings.HasPrefix(clean, "../") || clean == ".." { + return false + } + // prevent weird traversal + if strings.Contains(clean, `\`) { + return false + } + return true +} + +func isSafeBasename(name string) bool { + name = strings.TrimSpace(name) + if name == "" { + return false + } + if strings.Contains(name, "/") || strings.Contains(name, "\\") { + return false + } + return filepath.Base(name) == name +} + +func setNoStoreHeaders(w http.ResponseWriter) { + // verhindert Browser/Proxy Caching (wichtig für Logs/Status) + w.Header().Set("Cache-Control", "no-store, max-age=0") + w.Header().Set("Pragma", "no-cache") + w.Header().Set("Expires", "0") +} + +// ---------- Resolve dirs ---------- + func resolvePathRelativeToApp(p string) (string, error) { p = strings.TrimSpace(p) if p == "" { @@ -84,14 +159,15 @@ func getDoneDir() string { return strings.TrimSpace(s.DoneDir) } +// ---------- Finders ---------- + func findVideoPath(file string) (string, error) { base := filepath.Base(file) // verhindert path traversal - // TODO: passe diese Root-Dirs an deine echten Pfade an: roots := []string{ - getRecordingsDir(), // z.B. 
downloads/output root - getDoneDir(), // ✅ NEU: fertige Dateien liegen typischerweise hier - getKeepDir(), // keep root + getRecordingsDir(), + getDoneDir(), + getKeepDir(), } // 1) direkt in den Roots @@ -123,13 +199,6 @@ func findVideoPath(file string) (string, error) { return "", os.ErrNotExist } -func setNoStoreHeaders(w http.ResponseWriter) { - // verhindert Browser/Proxy Caching (wichtig für Logs/Status) - w.Header().Set("Cache-Control", "no-store, max-age=0") - w.Header().Set("Pragma", "no-cache") - w.Header().Set("Expires", "0") -} - func findFileInDirOrOneLevelSubdirs(root string, file string, skipDirName string) (string, os.FileInfo, bool) { // direct p := filepath.Join(root, file) @@ -183,6 +252,8 @@ func durationFromMetaIfFresh(videoPath, assetDir string, fi os.FileInfo) (float6 return readVideoMetaDuration(metaPath, fi) } +// durationSecondsCacheOnly returns a cached duration if available and still fresh. +// It relies on your existing durCache implementation elsewhere. func durationSecondsCacheOnly(path string, fi os.FileInfo) float64 { durCache.mu.Lock() e, ok := durCache.m[path] @@ -193,3 +264,83 @@ func durationSecondsCacheOnly(path string, fi os.FileInfo) float64 { } return 0 } + +// ---------- Playback resolver (shared by video + scrubber/meta) ---------- + +// resolves a playable file path from ?file=... (done/keep/record) or ?id=... 
(jobs map) +// returns absolute cleaned path +func resolvePlayablePathFromQuery(r *http.Request) (string, bool, int, string) { + // returns: (path, ok, httpStatus, errMsg) + + // 1) file mode + if file, ok, err := safeBasenameQuery(r, "file"); err != nil { + return "", false, http.StatusBadRequest, "ungültiger file" + } else if ok { + if !isAllowedVideoExt(file) { + return "", false, http.StatusForbidden, "nicht erlaubt" + } + + s := getSettings() + recordAbs, err := resolvePathRelativeToApp(s.RecordDir) + if err != nil { + return "", false, http.StatusInternalServerError, "recordDir auflösung fehlgeschlagen: " + err.Error() + } + doneAbs, err := resolvePathRelativeToApp(s.DoneDir) + if err != nil { + return "", false, http.StatusInternalServerError, "doneDir auflösung fehlgeschlagen: " + err.Error() + } + + // candidates: allow .ts and fallback to .mp4 + ext := strings.ToLower(filepath.Ext(file)) + names := []string{file} + if ext == ".ts" { + names = append(names, strings.TrimSuffix(file, ext)+".mp4") + } + + for _, name := range names { + if p, _, ok := findFileInDirOrOneLevelSubdirs(doneAbs, name, "keep"); ok { + return filepath.Clean(strings.TrimSpace(p)), true, 0, "" + } + if p, _, ok := findFileInDirOrOneLevelSubdirs(filepath.Join(doneAbs, "keep"), name, ""); ok { + return filepath.Clean(strings.TrimSpace(p)), true, 0, "" + } + if p, _, ok := findFileInDirOrOneLevelSubdirs(recordAbs, name, ""); ok { + return filepath.Clean(strings.TrimSpace(p)), true, 0, "" + } + } + return "", false, http.StatusNotFound, "datei nicht gefunden" + } + + // 2) id mode + id := strings.TrimSpace(r.URL.Query().Get("id")) + if id == "" { + return "", false, http.StatusBadRequest, "id fehlt" + } + + jobsMu.Lock() + job, ok := jobs[id] + jobsMu.Unlock() + if !ok { + return "", false, http.StatusNotFound, "job nicht gefunden" + } + + outPath := filepath.Clean(strings.TrimSpace(job.Output)) + if outPath == "" { + return "", false, http.StatusNotFound, "output fehlt" + } + + if 
!filepath.IsAbs(outPath) { + abs, err := resolvePathRelativeToApp(outPath) + if err != nil { + return "", false, http.StatusInternalServerError, "pfad auflösung fehlgeschlagen: " + err.Error() + } + outPath = abs + } + + fi, err := os.Stat(outPath) + if err != nil || fi == nil || fi.IsDir() || fi.Size() == 0 { + return "", false, http.StatusNotFound, "datei nicht gefunden" + } + + return outPath, true, 0, "" +} diff --git a/backend/record_preview_scrubber.go b/backend/record_preview_scrubber.go deleted file mode 100644 index 7d5025c..0000000 --- a/backend/record_preview_scrubber.go +++ /dev/null @@ -1,122 +0,0 @@ -package main - -import ( - "fmt" - "math" - "net/http" - "net/url" - "strconv" - "strings" -) - -const defaultScrubberCount = 18 - -// /api/preview-scrubber/{index}?id=... (oder ?file=...) -func recordPreviewScrubberFrame(w http.ResponseWriter, r *http.Request) { - const prefix = "/api/preview-scrubber/" - if !strings.HasPrefix(r.URL.Path, prefix) { - http.NotFound(w, r) - return - } - - idxPart := strings.Trim(strings.TrimPrefix(r.URL.Path, prefix), "/") - if idxPart == "" { - http.Error(w, "missing scrubber frame index", http.StatusBadRequest) - return - } - - idx, err := strconv.Atoi(idxPart) - if err != nil || idx < 0 { - http.Error(w, "invalid scrubber frame index", http.StatusBadRequest) - return - } - - // id oder file muss vorhanden sein (wie bei recordPreview / recordDoneMeta) - q := r.URL.Query() - id := strings.TrimSpace(q.Get("id")) - file := strings.TrimSpace(q.Get("file")) - if id == "" && file == "" { - http.Error(w, "missing id or file", http.StatusBadRequest) - return - } - - // Dauer aus Meta ermitteln (WICHTIG für gleichmäßige Verteilung) - durSec, err := lookupDurationForScrubber(r, id, file) - if err != nil || durSec <= 0 { - // Fallback: wir versuchen trotzdem was Sinnvolles - // (z. B. 60s annehmen) – besser als gar kein Bild - durSec = 60 - } - - // Count: gleich wie im Frontend (oder dynamisch, aber dann auch im Payload liefern!) 
- count := defaultScrubberCount - if idx >= count { - // wenn Frontend mehr sendet als Backend erwartet -> clamp - idx = count - 1 - } - if count < 1 { - count = 1 - } - - t := scrubberIndexToTime(idx, count, durSec) - - // An bestehenden Preview-Handler delegieren via Redirect - // recordPreview unterstützt bei dir bereits ?id=...&t=... - targetQ := url.Values{} - if id != "" { - targetQ.Set("id", id) - } - if file != "" { - targetQ.Set("file", file) - } - targetQ.Set("t", fmt.Sprintf("%.3f", t)) - - // Cache freundlich (optional feinjustieren) - w.Header().Set("Cache-Control", "private, max-age=300") - - http.Redirect(w, r, "/api/preview?"+targetQ.Encode(), http.StatusFound) -} - -// Gleichmäßig über die Videolänge sampeln (Mitte des Segments) -func scrubberIndexToTime(index, count int, durationSec float64) float64 { - if count <= 1 { - return 0.1 - } - if durationSec <= 0 { - return 0.1 - } - - // nicht exakt bei 0 / nicht exakt am Ende - maxT := math.Max(0.1, durationSec-0.1) - ratio := (float64(index) + 0.5) / float64(count) - t := ratio * maxT - - if t < 0.1 { - t = 0.1 - } - if t > maxT { - t = maxT - } - return t -} - -// TODO: Hier deine bestehende Meta-Lookup-Logik aus recordDoneMeta wiederverwenden. -// Ziel: durationSeconds aus meta.json / job-meta lesen. -// Diese Funktion ist der einzige Teil, den du an dein Projekt anpassen musst. 
-func lookupDurationForScrubber(r *http.Request, id, file string) (float64, error) { - // ------------------------------------------------------------ - // OPTION A (empfohlen): dieselbe interne Funktion nutzen wie recordDoneMeta - // Beispiel (PSEUDO): - // - // meta, err := loadDoneMetaByIDOrFile(id, file) - // if err != nil { return 0, err } - // if d := meta.DurationSeconds; d > 0 { return d, nil } - // - // ------------------------------------------------------------ - - // ------------------------------------------------------------ - // OPTION B: Wenn du aktuell keine Helper-Funktion hast: - // erstmal Fehler zurückgeben und später konkret anschließen. - // ------------------------------------------------------------ - return 0, fmt.Errorf("lookupDurationForScrubber not wired yet") -} diff --git a/backend/record_preview_sprite.go b/backend/record_preview_sprite.go deleted file mode 100644 index 1ab07e8..0000000 --- a/backend/record_preview_sprite.go +++ /dev/null @@ -1,67 +0,0 @@ -// backend\record_preview_sprite.go - -package main - -import ( - "net/http" - "os" - "path/filepath" - "strings" -) - -func recordPreviewSprite(w http.ResponseWriter, r *http.Request) { - if r.Method != http.MethodGet && r.Method != http.MethodHead { - http.Error(w, "Nur GET/HEAD", http.StatusMethodNotAllowed) - return - } - - // Unterstützt beide Prefixe (falls du mal testweise /api/preview-sprite/ nutzt) - id := strings.TrimPrefix(r.URL.Path, "/api/record/preview-sprite/") - if id == r.URL.Path { - id = strings.TrimPrefix(r.URL.Path, "/api/preview-sprite/") - } - id = strings.TrimSpace(id) - - // Falls jemand versehentlich einen Slash am Ende schickt - id = strings.Trim(id, "/") - - if id == "" { - http.Error(w, "id fehlt", http.StatusBadRequest) - return - } - - var err error - id, err = sanitizeID(id) - if err != nil { - http.Error(w, "ungültige id", http.StatusBadRequest) - return - } - - dir, err := generatedDirForID(id) - if err != nil { - http.Error(w, "ungültige id", 
http.StatusBadRequest) - return - } - - spritePath := filepath.Join(dir, "preview-sprite.webp") - - fi, err := os.Stat(spritePath) - if err != nil || fi.IsDir() || fi.Size() <= 0 { - http.NotFound(w, r) - return - } - - f, err := os.Open(spritePath) - if err != nil { - http.NotFound(w, r) - return - } - defer f.Close() - - // Cachebar (du hängst im Frontend ?v=updatedAtUnix dran) - w.Header().Set("Content-Type", "image/webp") - w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") - w.Header().Set("X-Content-Type-Options", "nosniff") - - http.ServeContent(w, r, "preview-sprite.webp", fi.ModTime(), f) -} diff --git a/backend/record_start.go b/backend/recorder.go similarity index 62% rename from backend/record_start.go rename to backend/recorder.go index 4a33e0b..7bb31eb 100644 --- a/backend/record_start.go +++ b/backend/recorder.go @@ -1,5 +1,4 @@ -// backend\record_start.go - +// backend/recorder.go package main import ( @@ -7,27 +6,259 @@ import ( "errors" "fmt" "math" + "net/http" + "net/url" "os" "path/filepath" + "strconv" "strings" "time" "github.com/google/uuid" ) +// ---------------- Progress mapping ---------------- + +func setJobProgress(job *RecordJob, phase string, pct int) { + phase = strings.TrimSpace(phase) + phaseLower := strings.ToLower(phase) + + if pct < 0 { + pct = 0 + } + if pct > 100 { + pct = 100 + } + + type rng struct{ start, end int } + rangeFor := func(ph string) rng { + switch ph { + case "postwork": + return rng{0, 5} + case "remuxing": + return rng{5, 65} + case "moving": + return rng{65, 75} + case "probe": + return rng{75, 80} + case "assets": + return rng{80, 99} + default: + return rng{0, 100} + } + } + + jobsMu.Lock() + defer jobsMu.Unlock() + + inPostwork := job.EndedAt != nil || (strings.TrimSpace(job.Phase) != "" && strings.ToLower(strings.TrimSpace(job.Phase)) != "recording") + if inPostwork { + if phaseLower == "" || phaseLower == "recording" { + return + } + } + + if phase != "" { + job.Phase = phase + } + + 
if phaseLower == "postwork" && pct == 0 { + job.Progress = 0 + return + } + + mapped := pct + + if inPostwork { + r := rangeFor(phaseLower) + if r.end >= r.start { + if pct >= r.start && pct <= r.end { + mapped = pct + } else { + width := float64(r.end - r.start) + mapped = r.start + int(math.Round((float64(pct)/100.0)*width)) + } + if mapped < r.start { + mapped = r.start + } + if mapped > r.end { + mapped = r.end + } + } + } + + if mapped < job.Progress { + mapped = job.Progress + } + job.Progress = mapped +} + +// ---------------- Preview scrubber ---------------- + +const defaultScrubberCount = 18 + +// /api/preview-scrubber/{index}?id=... (oder ?file=...) +func recordPreviewScrubberFrame(w http.ResponseWriter, r *http.Request) { + const prefix = "/api/preview-scrubber/" + if !strings.HasPrefix(r.URL.Path, prefix) { + http.NotFound(w, r) + return + } + + idxPart := strings.Trim(strings.TrimPrefix(r.URL.Path, prefix), "/") + if idxPart == "" { + http.Error(w, "missing scrubber frame index", http.StatusBadRequest) + return + } + + idx, err := strconv.Atoi(idxPart) + if err != nil || idx < 0 { + http.Error(w, "invalid scrubber frame index", http.StatusBadRequest) + return + } + + q := r.URL.Query() + id := strings.TrimSpace(q.Get("id")) + file := strings.TrimSpace(q.Get("file")) + if id == "" && file == "" { + http.Error(w, "missing id or file", http.StatusBadRequest) + return + } + + durSec, err := lookupDurationForScrubber(r) + if err != nil || durSec <= 0 { + durSec = 60 + } + + count := defaultScrubberCount + if idx >= count { + idx = count - 1 + } + if count < 1 { + count = 1 + } + + t := scrubberIndexToTime(idx, count, durSec) + + targetQ := url.Values{} + if id != "" { + targetQ.Set("id", id) + } + if file != "" { + targetQ.Set("file", file) + } + targetQ.Set("t", fmt.Sprintf("%.3f", t)) + + w.Header().Set("Cache-Control", "private, max-age=300") + http.Redirect(w, r, "/api/preview?"+targetQ.Encode(), http.StatusFound) +} + +// Gleichmäßig über die 
Videolänge sampeln (Mitte des Segments) +func scrubberIndexToTime(index, count int, durationSec float64) float64 { + if count <= 1 { + return 0.1 + } + if durationSec <= 0 { + return 0.1 + } + + maxT := math.Max(0.1, durationSec-0.1) + ratio := (float64(index) + 0.5) / float64(count) + t := ratio * maxT + + if t < 0.1 { + t = 0.1 + } + if t > maxT { + t = maxT + } + return t +} + +func lookupDurationForScrubber(r *http.Request) (float64, error) { + path, ok, _, _ := resolvePlayablePathFromQuery(r) + if !ok || strings.TrimSpace(path) == "" { + return 0, fmt.Errorf("unable to resolve file") + } + + // best-effort meta + ensureMetaJSONForPlayback(r.Context(), path) + + ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second) + defer cancel() + sec, err := durationSecondsCached(ctx, path) + if err != nil { + return 0, err + } + return sec, nil +} + +// ---------------- Preview sprite file handler ---------------- + +func recordPreviewSprite(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet && r.Method != http.MethodHead { + http.Error(w, "Nur GET/HEAD", http.StatusMethodNotAllowed) + return + } + + id := strings.TrimPrefix(r.URL.Path, "/api/record/preview-sprite/") + if id == r.URL.Path { + id = strings.TrimPrefix(r.URL.Path, "/api/preview-sprite/") + } + id = strings.TrimSpace(id) + id = strings.Trim(id, "/") + + if id == "" { + http.Error(w, "id fehlt", http.StatusBadRequest) + return + } + + var err error + id, err = sanitizeID(id) + if err != nil { + http.Error(w, "ungültige id", http.StatusBadRequest) + return + } + + dir, err := generatedDirForID(id) + if err != nil { + http.Error(w, "ungültige id", http.StatusBadRequest) + return + } + + spritePath := filepath.Join(dir, "preview-sprite.webp") + + fi, err := os.Stat(spritePath) + if err != nil || fi.IsDir() || fi.Size() <= 0 { + http.NotFound(w, r) + return + } + + f, err := os.Open(spritePath) + if err != nil { + http.NotFound(w, r) + return + } + defer f.Close() + + 
w.Header().Set("Content-Type", "image/webp") + w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") + w.Header().Set("X-Content-Type-Options", "nosniff") + + http.ServeContent(w, r, "preview-sprite.webp", fi.ModTime(), f) +} + +// ---------------- Start + run job ---------------- + func startRecordingInternal(req RecordRequest) (*RecordJob, error) { url := strings.TrimSpace(req.URL) if url == "" { return nil, errors.New("url fehlt") } - // Duplicate-running guard (identische URL) jobsMu.Lock() for _, j := range jobs { - // ✅ Nur blocken, solange wirklich noch aufgenommen wird. - // Sobald EndedAt gesetzt ist (Postwork/Queue läuft), darf ein neuer Download starten. if j != nil && j.Status == JobRunning && j.EndedAt == nil && strings.TrimSpace(j.SourceURL) == url { - // ✅ Wenn ein versteckter Auto-Check-Job läuft und der User manuell startet -> sofort sichtbar machen if j.Hidden && !req.Hidden { j.Hidden = false jobsMu.Unlock() @@ -41,11 +272,9 @@ func startRecordingInternal(req RecordRequest) (*RecordJob, error) { } } - // ✅ Timestamp + Output schon hier setzen, damit UI sofort Model/Filename/Details hat startedAt := time.Now() provider := detectProvider(url) - // best-effort Username aus URL username := "" switch provider { case "chaturbate": @@ -57,10 +286,8 @@ func startRecordingInternal(req RecordRequest) (*RecordJob, error) { username = "unknown" } - // Dateiname (konsistent zu runJob: gleicher Timestamp) filename := fmt.Sprintf("%s_%s.ts", username, startedAt.Format("01_02_2006__15-04-05")) - // best-effort: absoluter RecordDir (fallback auf Settings-Wert) s := getSettings() recordDirAbs, _ := resolvePathRelativeToApp(s.RecordDir) recordDir := strings.TrimSpace(recordDirAbs) @@ -77,7 +304,7 @@ func startRecordingInternal(req RecordRequest) (*RecordJob, error) { SourceURL: url, Status: JobRunning, StartedAt: startedAt, - StartedAtMs: startedAt.UnixMilli(), // ✅ NEU + StartedAtMs: startedAt.UnixMilli(), Output: outPath, Hidden: req.Hidden, 
cancel: cancel, @@ -86,7 +313,6 @@ func startRecordingInternal(req RecordRequest) (*RecordJob, error) { jobs[jobID] = job jobsMu.Unlock() - // ✅ NEU: Hidden-Jobs nicht sofort ins UI broadcasten if !job.Hidden { notifyJobsChanged() } @@ -101,13 +327,11 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { var err error - // ✅ nutze den Timestamp vom Job (damit Start/Output konsistent sind) now := job.StartedAt if now.IsZero() { now = time.Now() } - // ✅ falls StartedAtMs aus irgendeinem Grund leer ist if job.StartedAtMs == 0 { base := job.StartedAt if base.IsZero() { @@ -121,11 +345,9 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { jobsMu.Unlock() } - // ✅ Phase für Recording explizit setzen (damit spätere Progress-Writer das erkennen können) setJobProgress(job, "recording", 0) notifyJobsChanged() - // ---- Aufnahme starten (Output-Pfad sauber relativ zur EXE auflösen) ---- switch provider { case "chaturbate": if !hasChaturbateCookies(req.Cookie) { @@ -144,7 +366,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { username := extractUsername(req.URL) filename := fmt.Sprintf("%s_%s.ts", username, now.Format("01_02_2006__15-04-05")) - // ✅ wenn Output schon beim Start gesetzt wurde, nutze ihn (falls absolut) jobsMu.Lock() existingOut := strings.TrimSpace(job.Output) jobsMu.Unlock() @@ -154,7 +375,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { outPath = filepath.Join(recordDirAbs, filename) } - // Output nur aktualisieren, wenn es sich ändert if strings.TrimSpace(existingOut) != strings.TrimSpace(outPath) { jobsMu.Lock() job.Output = outPath @@ -192,10 +412,8 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { fmt.Println("❌ [record]", provider, job.SourceURL, "->", err) } - // ---- Recording fertig: EndedAt/Error setzen ---- end := time.Now() - // Zielstatus bestimmen (finaler Status wird erst NACH Postwork gesetzt!) 
target := JobFinished var errText string if err != nil { @@ -207,25 +425,19 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { } } - // direkt nach provider record endet (egal ob err != nil oder nil) stopPreview(job) - // EndedAt + Error speichern (kurz locken) jobsMu.Lock() job.EndedAt = &end - job.EndedAtMs = end.UnixMilli() // ✅ NEU + job.EndedAtMs = end.UnixMilli() if errText != "" { job.Error = errText } - - // ✅ WICHTIG: sofort Phase wechseln, damit Recorder-Progress danach nichts mehr “zurücksetzt” job.Phase = "postwork" - out := strings.TrimSpace(job.Output) jobsMu.Unlock() notifyJobsChanged() - // Falls Output fehlt (z.B. provider error), direkt final status setzen if out == "" { jobsMu.Lock() job.Status = target @@ -239,17 +451,13 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { return } - // ✅ NEU: Bevor Postwork queued wird -> kleine Downloads direkt löschen - // (spart Remux/Move/ffprobe/assets komplett) + // pre-queue auto delete (small) { s := getSettings() minMB := s.AutoDeleteSmallDownloadsBelowMB if s.AutoDeleteSmallDownloads && minMB > 0 { threshold := int64(minMB) * 1024 * 1024 - - // out ist i.d.R. 
absolut; Stat ist cheap if fi, serr := os.Stat(out); serr == nil && fi != nil && !fi.IsDir() { - // Size auch ins Job-JSON schreiben (nice fürs UI, selbst wenn wir danach löschen) jobsMu.Lock() job.SizeBytes = fi.Size() jobsMu.Unlock() @@ -263,7 +471,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { removeGeneratedForID(id) purgeDurationCacheForPath(out) - // Job komplett entfernen (wie dein späterer Auto-Delete-Block) jobsMu.Lock() delete(jobs, job.ID) jobsMu.Unlock() @@ -274,32 +481,23 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { if shouldLogRecordInfo(req) { fmt.Println("🧹 auto-deleted (pre-queue):", base, "(size: "+formatBytesSI(fi.Size())+")") } - return } else { fmt.Println("⚠️ auto-delete (pre-queue) failed:", derr) - // wenn delete fehlschlägt -> normal weiter in Postwork } } } } } - // ✅ Postwork: remux/move/ffprobe/assets begrenzen -> in Queue + // postwork queue postOut := out postTarget := target - postKey := "postwork:" + job.ID - // ✅ WICHTIG: - // - Status noch NICHT auf JobStopped/JobFinished setzen, sonst verschwindet er aus der Downloads-Tabelle. - // - Stattdessen Phase "postwork" + Progress hochsetzen (monoton). - // - Zusätzlich: PostWorkKey setzen + initialen Queue-Status ins Job-JSON hängen. 
jobsMu.Lock() job.Phase = "postwork" - job.PostWorkKey = postKey - // initialer Status (meist "missing", bis Enqueue done ist – wir updaten direkt danach nochmal) { s := postWorkQ.StatusForKey(postKey) job.PostWork = &s @@ -311,19 +509,14 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { Key: postKey, Added: time.Now(), Run: func(ctx context.Context) error { - // beim Start: Queue-Status refresh (sollte jetzt "running" werden) { st := postWorkQ.StatusForKey(postKey) - jobsMu.Lock() job.PostWork = &st jobsMu.Unlock() - // optisches "queued" bumping setJobProgress(job, "postwork", 0) - notifyJobsChanged() - } out := strings.TrimSpace(postOut) @@ -340,21 +533,16 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { return nil } - // Helper: Progress nur nach oben (gegen "rückwärts") setPhase := func(phase string, pct int) { - // Phase+Progress inkl. Mapping/Monotonie setJobProgress(job, phase, pct) - - // Queue-Status aktuell halten st := postWorkQ.StatusForKey(postKey) jobsMu.Lock() job.PostWork = &st jobsMu.Unlock() - notifyJobsChanged() } - // 1) Remux (nur wenn TS) + // 1) Remux if strings.EqualFold(filepath.Ext(out), ".ts") { setPhase("remuxing", 72) if newOut, err2 := maybeRemuxTSForJob(job, out); err2 == nil && strings.TrimSpace(newOut) != "" { @@ -366,7 +554,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { } } - // 2) Move to done (best-effort) + // 2) Move to done setPhase("moving", 78) if moved, err2 := moveToDoneDir(out); err2 == nil && strings.TrimSpace(moved) != "" { out = strings.TrimSpace(moved) @@ -377,7 +565,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { notifyDoneChanged() } - // 3) Dauer (ffprobe) + // 3) Duration setPhase("probe", 84) { dctx, cancel := context.WithTimeout(ctx, 6*time.Second) @@ -390,13 +578,12 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { cancel() } - // 5) Video-Props + // 4) Video props setPhase("probe", 86) 
{ pctx, cancel := context.WithTimeout(ctx, 6*time.Second) w, h, fps, perr := probeVideoProps(pctx, out) cancel() - if perr == nil { jobsMu.Lock() job.VideoWidth = w @@ -407,7 +594,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { } } - // 6) Assets (preview.webp + preview.mp4) + // 5) Assets with progress const ( assetsStart = 86 assetsEnd = 99 @@ -425,7 +612,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { if r > 1 { r = 1 } - pct := assetsStart + int(math.Round(r*float64(assetsEnd-assetsStart))) if pct < assetsStart { pct = assetsStart @@ -433,7 +619,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { if pct > assetsEnd { pct = assetsEnd } - if pct == lastPct { return } @@ -445,12 +630,12 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { setPhase("assets", pct) } - if err := ensureAssetsForVideoWithProgress(out, job.SourceURL, update); err != nil { + if _, err := ensureAssetsForVideoWithProgressCtx(ctx, out, job.SourceURL, update); err != nil { fmt.Println("⚠️ ensureAssetsForVideo:", err) } setPhase("assets", assetsEnd) - // 7) Finalize: JETZT finalen Status setzen (damit er erst dann aus Downloads verschwindet) + // Finalize jobsMu.Lock() job.Status = postTarget job.Phase = "" @@ -460,20 +645,17 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { jobsMu.Unlock() notifyJobsChanged() notifyDoneChanged() - return nil }, }) if okQueued { - // ✅ direkt nach erfolgreichem Enqueue nochmal Status holen (nun "queued" + Position möglich) st := postWorkQ.StatusForKey(postKey) jobsMu.Lock() job.PostWork = &st jobsMu.Unlock() notifyJobsChanged() } else { - // Queue voll -> Fallback: finalisieren jobsMu.Lock() job.Status = postTarget job.Phase = "" @@ -484,6 +666,4 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { notifyJobsChanged() notifyDoneChanged() } - - return } diff --git a/backend/routes.go b/backend/routes.go index 
a010899..6abd3cd 100644 --- a/backend/routes.go +++ b/backend/routes.go @@ -47,6 +47,7 @@ func registerRoutes(mux *http.ServeMux, auth *AuthManager) *ModelStore { api.HandleFunc("/api/record/status", recordStatus) api.HandleFunc("/api/record/stop", recordStop) api.HandleFunc("/api/preview", recordPreview) + api.HandleFunc("/api/preview/live", recordPreviewLive) api.HandleFunc("/api/preview-scrubber/", recordPreviewScrubberFrame) api.HandleFunc("/api/preview-sprite/", recordPreviewSprite) api.HandleFunc("/api/record/list", recordList) @@ -136,10 +137,14 @@ func buildPostgresDSNFromSettings() (string, error) { return "", fmt.Errorf("databaseUrl ungültig: %w", err) } - // 1) Wenn URL bereits Passwort enthält -> direkt verwenden + // 1) Wenn URL bereits Passwort enthält -> nur verwenden, wenn es NICHT der Placeholder ist if u.User != nil { - if _, hasPw := u.User.Password(); hasPw { - return u.String(), nil + if pw, hasPw := u.User.Password(); hasPw { + pw = strings.TrimSpace(pw) + if pw != "" && pw != "****" { + return u.String(), nil + } + // sonst: Placeholder -> ignorieren und unten aus EncryptedDBPassword einsetzen } } diff --git a/backend/settings.go b/backend/settings.go index b057bfe..f18dc98 100644 --- a/backend/settings.go +++ b/backend/settings.go @@ -140,6 +140,14 @@ func loadSettings() { } } } + + // ✅ WICHTIG: Migrationsergebnis zurück in den globalen settings-State schreiben + settingsMu.Lock() + settings = s + settingsMu.Unlock() + + // optional aber sinnvoll: Migration auch persistieren + saveSettingsToDisk() } // Ordner sicherstellen @@ -315,6 +323,10 @@ func recordSettingsHandler(w http.ResponseWriter, r *http.Request) { // 2) Migration: wenn in.DatabaseURL ein Passwort enthält, extrahieren // und URL ohne Passwort zurückschreiben. 
sanitizedURL, pwFromURL := stripPasswordFromPostgresURL(in.DatabaseURL) + pwFromURL = strings.TrimSpace(pwFromURL) + if pwFromURL == "****" { + pwFromURL = "" + } if sanitizedURL != "" { in.DatabaseURL = sanitizedURL } diff --git a/backend/cleanup.go b/backend/tasks_cleanup.go similarity index 66% rename from backend/cleanup.go rename to backend/tasks_cleanup.go index e064141..d8a9c1f 100644 --- a/backend/cleanup.go +++ b/backend/tasks_cleanup.go @@ -1,13 +1,17 @@ -// backend\cleanup.go +// backend\tasks_cleanup.go package main import ( "encoding/json" + "fmt" + "io/fs" "net/http" "os" "path/filepath" "strings" + "sync/atomic" + "time" ) type cleanupResp struct { @@ -19,10 +23,6 @@ type cleanupResp struct { DeletedBytesHuman string `json:"deletedBytesHuman"` ErrorCount int `json:"errorCount"` - // Orphans cleanup (previews/thumbs/generated ohne passende Video-Datei) - OrphanIDsScanned int `json:"orphanIdsScanned"` - OrphanIDsRemoved int `json:"orphanIdsRemoved"` - // ✅ NEU: Generated-GC separat (nicht in orphanIds reinmischen) GeneratedOrphansChecked int `json:"generatedOrphansChecked"` GeneratedOrphansRemoved int `json:"generatedOrphansRemoved"` @@ -76,9 +76,6 @@ func settingsCleanupHandler(w http.ResponseWriter, r *http.Request) { cleanupSmallFiles(doneAbs, threshold, &resp) } - // 2) Orphans entfernen (immer sinnvoll, unabhängig von mb) - cleanupOrphanAssets(doneAbs, &resp) - // ✅ Beim manuellen Aufräumen: Generated-GC synchron laufen lassen, // damit die Zahlen in der JSON-Response landen. 
gcStats := triggerGeneratedGarbageCollectorSync() @@ -151,8 +148,6 @@ func cleanupSmallFiles(doneAbs string, threshold int64, resp *cleanupResp) { // generated + legacy cleanup (best effort) if strings.TrimSpace(id) != "" { removeGeneratedForID(id) - _ = os.RemoveAll(filepath.Join(doneAbs, "preview", id)) - _ = os.RemoveAll(filepath.Join(doneAbs, "thumbs", id)) } purgeDurationCacheForPath(p) @@ -189,8 +184,6 @@ func cleanupSmallFiles(doneAbs string, threshold int64, resp *cleanupResp) { if strings.TrimSpace(id) != "" { removeGeneratedForID(id) - _ = os.RemoveAll(filepath.Join(doneAbs, "preview", id)) - _ = os.RemoveAll(filepath.Join(doneAbs, "thumbs", id)) } purgeDurationCacheForPath(full) @@ -204,107 +197,123 @@ func cleanupSmallFiles(doneAbs string, threshold int64, resp *cleanupResp) { scanDir(doneAbs, true) } -// Orphans = Preview/Thumbs/Generated IDs, für die keine Video-Datei im doneAbs existiert. -func cleanupOrphanAssets(doneAbs string, resp *cleanupResp) { - // 1) Existierende Video-IDs einsammeln - existingIDs := collectExistingVideoIDs(doneAbs) +var generatedGCRunning int32 - // 2) Orphan-IDs aus preview/thumbs ermitteln - previewDir := filepath.Join(doneAbs, "preview") - thumbsDir := filepath.Join(doneAbs, "thumbs") +type generatedGCStats struct { + Checked int + Removed int +} - ids := make(map[string]struct{}) +// Läuft synchron und liefert Zahlen zurück (für /api/settings/cleanup Response). +func triggerGeneratedGarbageCollectorSync() generatedGCStats { + // nur 1 GC gleichzeitig + if !atomic.CompareAndSwapInt32(&generatedGCRunning, 0, 1) { + fmt.Println("🧹 [gc] skip: already running") + return generatedGCStats{} + } + defer atomic.StoreInt32(&generatedGCRunning, 0) - addDirChildrenAsIDs := func(dir string) { - ents, err := os.ReadDir(dir) + stats := runGeneratedGarbageCollector() + return stats +} + +// Läuft 1× nach Serverstart (mit Delay), löscht /generated/* Orphans. 
+func startGeneratedGarbageCollector() { + go func() { + time.Sleep(3 * time.Second) + triggerGeneratedGarbageCollectorSync() + }() +} + +// Core-Logik ohne Delay (für manuelle Trigger, z.B. nach Cleanup) +// Liefert Stats zurück, damit /api/settings/cleanup die Zahlen anzeigen kann. +func runGeneratedGarbageCollector() generatedGCStats { + stats := generatedGCStats{} + + s := getSettings() + + doneAbs, err := resolvePathRelativeToApp(s.DoneDir) + if err != nil { + fmt.Println("🧹 [gc] resolve doneDir failed:", err) + return stats + } + doneAbs = strings.TrimSpace(doneAbs) + if doneAbs == "" { + return stats + } + + // 1) Live-IDs sammeln: alle mp4/ts unter /done (rekursiv), .trash ignorieren + live := make(map[string]struct{}, 4096) + + _ = filepath.WalkDir(doneAbs, func(p string, d fs.DirEntry, err error) error { if err != nil { - return + return nil } - for _, e := range ents { + + name := d.Name() + + if d.IsDir() { + if strings.EqualFold(name, ".trash") { + return fs.SkipDir + } + return nil + } + + ext := strings.ToLower(filepath.Ext(name)) + if ext != ".mp4" && ext != ".ts" { + return nil + } + + info, err := d.Info() + if err != nil || info.IsDir() || info.Size() <= 0 { + return nil + } + + base := strings.TrimSuffix(name, ext) + id, err := sanitizeID(stripHotPrefix(base)) + if err != nil || id == "" { + return nil + } + + live[id] = struct{}{} + return nil + }) + + // 2) /generated/meta/ prüfen + metaRoot, err := generatedMetaRoot() + if err == nil { + metaRoot = strings.TrimSpace(metaRoot) + } + if err != nil || metaRoot == "" { + return stats + } + + removedMeta := 0 + checkedMeta := 0 + + if entries, err := os.ReadDir(metaRoot); err == nil { + for _, e := range entries { if !e.IsDir() { continue } id := strings.TrimSpace(e.Name()) - if id == "" { + if id == "" || strings.HasPrefix(id, ".") { continue } - ids[id] = struct{}{} + + checkedMeta++ + if _, ok := live[id]; ok { + continue + } + + removeGeneratedForID(id) + removedMeta++ } } - 
addDirChildrenAsIDs(previewDir) - addDirChildrenAsIDs(thumbsDir) + fmt.Printf("🧹 [gc] generated/meta checked=%d removed_orphans=%d\n", checkedMeta, removedMeta) + stats.Checked += checkedMeta + stats.Removed += removedMeta - resp.OrphanIDsScanned = len(ids) - - // 3) Alles löschen, was nicht mehr existiert - for id := range ids { - if _, ok := existingIDs[id]; ok { - continue - } - - // remove generated artifacts (best effort) - removeGeneratedForID(id) - - // remove legacy preview/thumbs - _ = os.RemoveAll(filepath.Join(previewDir, id)) - _ = os.RemoveAll(filepath.Join(thumbsDir, id)) - - resp.OrphanIDsRemoved++ - } -} - -func collectExistingVideoIDs(doneAbs string) map[string]struct{} { - out := make(map[string]struct{}) - - isCandidate := func(name string) bool { - low := strings.ToLower(name) - if strings.Contains(low, ".part") || strings.Contains(low, ".tmp") { - return false - } - ext := strings.ToLower(filepath.Ext(name)) - return ext == ".mp4" || ext == ".ts" - } - - addFile := func(p string) { - name := filepath.Base(p) - if !isCandidate(name) { - return - } - base := strings.TrimSuffix(name, filepath.Ext(name)) - id := stripHotPrefix(base) - id = strings.TrimSpace(id) - if id != "" { - out[id] = struct{}{} - } - } - - // root + 1-level subdirs (skip keep) - ents, err := os.ReadDir(doneAbs) - if err != nil { - return out - } - - for _, e := range ents { - full := filepath.Join(doneAbs, e.Name()) - if e.IsDir() { - if e.Name() == "keep" { - continue - } - sub, err := os.ReadDir(full) - if err != nil { - continue - } - for _, se := range sub { - if se.IsDir() { - continue - } - addFile(filepath.Join(full, se.Name())) - } - continue - } - addFile(full) - } - - return out + return stats } diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index c1c75a9..3228821 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -16,7 +16,7 @@ import ModelDetails from './components/ui/ModelDetails' import { SignalIcon, HeartIcon, HandThumbUpIcon, EyeIcon } 
from '@heroicons/react/24/solid' import PerformanceMonitor from './components/ui/PerformanceMonitor' import { useNotify } from './components/ui/notify' -import { startChaturbateOnlinePolling } from './lib/chaturbateOnlinePoller' +//import { startChaturbateOnlinePolling } from './lib/chaturbateOnlinePoller' import CategoriesTab from './components/ui/CategoriesTab' import LoginPage from './components/ui/LoginPage' @@ -258,6 +258,16 @@ export default function App() { const [authChecked, setAuthChecked] = useState(false) const [authed, setAuthed] = useState(false) + const sourceUrlInputRef = useRef(null) + + const selectSourceUrl = useCallback(() => { + const el = sourceUrlInputRef.current + if (!el) return + // Fokus sicherstellen, dann alles markieren + el.focus() + // rAF, damit der Fokus sicher "sitzt" (und für Mobile/Safari stabiler) + requestAnimationFrame(() => el.select()) + }, []) const checkAuth = useCallback(async () => { try { @@ -2401,288 +2411,6 @@ export default function App() { } }, [autoAddEnabled, autoStartEnabled, enqueueStart]) - useEffect(() => { - const stop = startChaturbateOnlinePolling({ - getModels: () => { - if (!recSettingsRef.current.useChaturbateApi) return [] - - const modelsMap = modelsByKeyRef.current - const pendingMap = pendingAutoStartByKeyRef.current - - const watchedKeysLower = Object.values(modelsMap) - .filter((m) => Boolean(m?.watching) && String(m?.host ?? '').toLowerCase().includes('chaturbate')) - .map((m) => String(m?.modelKey ?? '').trim().toLowerCase()) - .filter(Boolean) - - const queuedKeysLower = Object.keys(pendingMap || {}) - .map((k) => String(k || '').trim().toLowerCase()) - .filter(Boolean) - - // ✅ NUR watched + queued pollen (Store kann riesig sein -> lag) - // Wenn du Store-Online später willst: extra, seltener Poll (z.B. 60s) separat lösen. 
- return Array.from(new Set([...watchedKeysLower, ...queuedKeysLower])) - }, - - getShow: () => ['public', 'private', 'hidden', 'away'], - - intervalMs: 8000, - - onData: (data: ChaturbateOnlineResponse) => { - void (async () => { - if (!data?.enabled) { - setCbOnlineByKeyLower({}) - cbOnlineByKeyLowerRef.current = {} - lastCbShowByKeyLowerRef.current = {} - setPendingWatchedRooms([]) - everCbOnlineByKeyLowerRef.current = {} - cbOnlineInitDoneRef.current = false - lastCbOnlineByKeyLowerRef.current = {} - setLastHeaderUpdateAtMs(Date.now()) - return - } - - const nextSnap: Record = {} - for (const r of Array.isArray(data.rooms) ? data.rooms : []) { - const u = String(r?.username ?? '').trim().toLowerCase() - if (u) nextSnap[u] = r - } - - setCbOnlineByKeyLower(nextSnap) - cbOnlineByKeyLowerRef.current = nextSnap - - // ✅ Toasts: (A) watched offline->online, (B) waiting->public, (C) online->offline->online => "wieder online" - try { - const notificationsOn = Boolean((recSettingsRef.current as any).enableNotifications ?? true) - const waiting = new Set(['private', 'away', 'hidden']) - - // watched-Keys (nur Chaturbate) - const watchedSetLower = new Set( - Object.values(modelsByKeyRef.current || {}) - .filter((m) => Boolean(m?.watching) && String(m?.host ?? '').toLowerCase().includes('chaturbate')) - .map((m) => String(m?.modelKey ?? '').trim().toLowerCase()) - .filter(Boolean) - ) - - const prevShow = lastCbShowByKeyLowerRef.current || {} - const nextShowMap: Record = { ...prevShow } - - const prevOnline = lastCbOnlineByKeyLowerRef.current || {} - const isInitial = !cbOnlineInitDoneRef.current - - // ✅ "war schon mal online" Snapshot (vor diesem Poll) - const everOnline = everCbOnlineByKeyLowerRef.current || {} - const nextEverOnline: Record = { ...everOnline } - - for (const [keyLower, room] of Object.entries(nextSnap)) { - const nowShow = String((room as any)?.current_show ?? '').toLowerCase().trim() - const beforeShow = String(prevShow[keyLower] ?? 
'').toLowerCase().trim() - - const wasOnline = Boolean(prevOnline[keyLower]) - const isOnline = true // weil es in nextSnap ist - const becameOnline = isOnline && !wasOnline - - // ✅ war irgendwann schon mal online (vor diesem Poll)? - const hadEverBeenOnline = Boolean(everOnline[keyLower]) - - const modelName = String((room as any)?.username ?? keyLower).trim() || keyLower - const imageUrl = String((room as any)?.image_url ?? '').trim() - - // immer merken: jetzt ist es online - nextEverOnline[keyLower] = true - - // (B) waiting -> public => "wieder online" (höchste Priorität, damit kein Doppel-Toast) - const becamePublicFromWaiting = nowShow === 'public' && waiting.has(beforeShow) - if (becamePublicFromWaiting) { - if (notificationsOn) { - notify.info(modelName, 'ist wieder online.', { - imageUrl, - imageAlt: `${modelName} Vorschau`, - durationMs: 5500, - onClick: () => { - window.dispatchEvent( - new CustomEvent('open-model-details', { - detail: { modelKey: modelName }, - }) - ) - }, - }) - } - - if (nowShow) nextShowMap[keyLower] = nowShow - continue - } - - // (A/C) watched: offline -> online - if (watchedSetLower.has(keyLower) && becameOnline) { - // C: online->offline->online => "wieder online" - const cameBackFromOffline = hadEverBeenOnline - - // Startup-Spam vermeiden - if (notificationsOn && !isInitial) { - notify.info( - modelName, - cameBackFromOffline ? 'ist wieder online.' 
: 'ist online.', - { - imageUrl, - imageAlt: `${modelName} Vorschau`, - durationMs: 5500, - onClick: () => { - window.dispatchEvent( - new CustomEvent('open-model-details', { - detail: { modelKey: modelName }, - }) - ) - }, - } - ) - } - } - - if (nowShow) nextShowMap[keyLower] = nowShow - } - - // Presence-Snapshot merken - const nextOnline: Record = {} - for (const k of Object.keys(nextSnap)) nextOnline[k] = true - lastCbOnlineByKeyLowerRef.current = nextOnline - - // ✅ "ever online" merken - everCbOnlineByKeyLowerRef.current = nextEverOnline - - cbOnlineInitDoneRef.current = true - lastCbShowByKeyLowerRef.current = nextShowMap - } catch { - // ignore - } - - // Online-Keys für Store - const storeKeys = chaturbateStoreKeysLowerRef.current - const nextOnlineStore: Record = {} - for (const k of storeKeys || []) { - const kl = String(k || '').trim().toLowerCase() - if (kl && nextSnap[kl]) nextOnlineStore[kl] = true - } - - // Pending Watched Rooms (nur im running Tab) - if (!recSettingsRef.current.useChaturbateApi) { - setPendingWatchedRooms([]) - } else if (selectedTabRef.current !== 'running') { - // optional: nicht leeren - } else { - const modelsMap = modelsByKeyRef.current - const pendingMap = pendingAutoStartByKeyRef.current - - const watchedKeysLower = Array.from( - new Set( - Object.values(modelsMap) - .filter((m) => Boolean(m?.watching) && String(m?.host ?? '').toLowerCase().includes('chaturbate')) - .map((m) => String(m?.modelKey ?? 
'').trim().toLowerCase()) - .filter(Boolean) - ) - ) - - const queuedKeysLower = Object.keys(pendingMap || {}) - .map((k) => String(k || '').trim().toLowerCase()) - .filter(Boolean) - - const queuedSetLower = new Set(queuedKeysLower) - const keysToCheckLower = Array.from(new Set([...watchedKeysLower, ...queuedKeysLower])) - - if (keysToCheckLower.length === 0) { - setPendingWatchedRooms([]) - } else { - const nextPending: PendingWatchedRoom[] = [] - - for (const keyLower of keysToCheckLower) { - const room = nextSnap[keyLower] - if (!room) continue - - const username = String(room?.username ?? '').trim() - const currentShow = String(room?.current_show ?? 'unknown') - - if (currentShow === 'public' && !queuedSetLower.has(keyLower)) continue - - const canonicalUrl = `https://chaturbate.com/${(username || keyLower).trim()}/` - - nextPending.push({ - id: keyLower, - modelKey: username || keyLower, - url: canonicalUrl, - currentShow, - imageUrl: String((room as any)?.image_url ?? ''), - }) - } - - nextPending.sort((a, b) => a.modelKey.localeCompare(b.modelKey, undefined, { sensitivity: 'base' })) - setPendingWatchedRooms(nextPending) - } - } - - // queued auto-start - if (!recSettingsRef.current.useChaturbateApi) return - if (busyRef.current) return - - const pendingMap = pendingAutoStartByKeyRef.current - const keys = Object.keys(pendingMap || {}) - .map((k) => String(k || '').toLowerCase()) - .filter(Boolean) - - for (const kLower of keys) { - const room = nextSnap[kLower] - if (!room) continue - if (String(room.current_show ?? 
'') !== 'public') continue - - const url = pendingMap[kLower] - if (!url) continue - - // ✅ nicht mehr seriell awaiten, sondern in die Start-Queue - enqueueStart({ url, silent: true, pendingKeyLower: kLower }) - } - - setLastHeaderUpdateAtMs(Date.now()) - })() - }, - }) - - return () => stop() - }, []) - - useEffect(() => { - // ✅ nur sinnvoll, wenn Chaturbate API aktiv ist - if (!recSettings.useChaturbateApi) { - setOnlineModelsCount(0) - return - } - - const stop = startChaturbateOnlinePolling({ - // ✅ leer => ALL-mode (durch fetchAllWhenNoModels) - getModels: () => [], - getShow: () => ['public', 'private', 'hidden', 'away'], - - // deutlich seltener, weil potentiell groß - intervalMs: 30000, - fetchAllWhenNoModels: true, - - onData: (data) => { - if (!data?.enabled) { - setOnlineModelsCount(0) - return - } - - const total = Number((data as any)?.total ?? 0) - setOnlineModelsCount(Number.isFinite(total) ? total : 0) - - setLastHeaderUpdateAtMs(Date.now()) - }, - - onError: (e) => { - console.error('[ALL-online poller] error', e) - }, - }) - - return () => stop() - }, [recSettings.useChaturbateApi]) - if (!authChecked) { return
Lade…
} @@ -2792,8 +2520,22 @@ export default function App() {
setSourceUrl(e.target.value)} + onMouseDown={(e) => { + // nur Linksklick + if (e.button !== 0) return + + // wenn schon fokussiert: Browser soll Caret nicht irgendwohin setzen + // und wir markieren gleich alles + e.preventDefault() + selectSourceUrl() + }} + onFocus={() => { + // z.B. Tab-Navigation ins Feld + selectSourceUrl() + }} placeholder="https://…" className="block w-full rounded-lg px-3 py-2.5 text-sm bg-white text-gray-900 shadow-sm ring-1 ring-gray-200 focus:outline-none focus:ring-2 focus:ring-indigo-500 dark:bg-white/10 dark:text-white dark:ring-white/10" /> @@ -2919,11 +2661,18 @@ export default function App() { runningJobs={runningJobs} cookies={cookies} blurPreviews={recSettings.blurPreviews} + + // ✅ neu: gleiche Teaser-Settings wie FinishedDownloads + teaserPlayback={recSettings.teaserPlayback ?? 'hover'} + teaserAudio={Boolean(recSettings.teaserAudio)} + onToggleHot={handleToggleHot} onDelete={handleDeleteJob} + onKeep={handleKeepJob} // ✅ neu onToggleFavorite={handleToggleFavorite} onToggleLike={handleToggleLike} onToggleWatch={handleToggleWatch} + onStopJob={stopJob} /> {playerJob ? 
( diff --git a/frontend/src/components/ui/FinishedDownloadsCardsView.tsx b/frontend/src/components/ui/FinishedDownloadsCardsView.tsx index 5ae24f0..74d68f5 100644 --- a/frontend/src/components/ui/FinishedDownloadsCardsView.tsx +++ b/frontend/src/components/ui/FinishedDownloadsCardsView.tsx @@ -358,6 +358,8 @@ export default function FinishedDownloadsCardsView({ const [scrubActiveByKey, setScrubActiveByKey] = React.useState>({}) const [scrubHoveringByKey, setScrubHoveringByKey] = React.useState>({}) + const [hoveredThumbKey, setHoveredThumbKey] = React.useState(null) + const setScrubActiveIndex = React.useCallback((key: string, index: number | undefined) => { setScrubActiveByKey((prev) => { if (index === undefined) { @@ -619,11 +621,18 @@ export default function FinishedDownloadsCardsView({ } className="group/thumb relative aspect-video rounded-t-lg bg-black/5 dark:bg-white/5" onMouseEnter={ - isSmall || opts?.disablePreviewHover ? undefined : () => onHoverPreviewKeyChange?.(k) + isSmall || opts?.disablePreviewHover + ? undefined + : () => { + setHoveredThumbKey(k) + onHoverPreviewKeyChange?.(k) + } } onMouseLeave={() => { if (!isSmall && !opts?.disablePreviewHover) onHoverPreviewKeyChange?.(null) + setHoveredThumbKey((prev) => (prev === k ? null : prev)) clearScrubActiveIndex(k) + setScrubHovering(k, false) }} onClick={(e) => { e.preventDefault() @@ -674,8 +683,6 @@ export default function FinishedDownloadsCardsView({ animated={allowTeaserAnimation} animatedMode="teaser" animatedTrigger="always" - clipSeconds={1} - thumbSamples={18} inlineVideo={!opts?.disableInline && inlineActive ? 'always' : false} inlineNonce={inlineNonce} inlineControls={inlineActive} @@ -718,69 +725,55 @@ export default function FinishedDownloadsCardsView({ ) : null} {/* ✅ stashapp-artiger Hover-Scrubber (wie GalleryView) */} - {!opts?.isDecorative && !opts?.disableScrubber && !inlineActive && scrubberCount > 1 ? ( -
e.stopPropagation()} - onMouseDown={(e) => e.stopPropagation()} - onMouseEnter={() => setScrubHovering(k, true)} - onMouseLeave={() => { - setScrubHovering(k, false) - // optional: Index sofort loslassen, dann springt Bar direkt zurück auf Teaser - setScrubActiveIndex(k, undefined) - }} - > - setScrubActiveIndex(k, idx)} - onIndexClick={(index) => { - // wie Preview-Klick: inline starten - if (isSmall || opts?.disableInline) { - // Mobile/Decorative/Fallback: bestehendes Verhalten - handleScrubberClickIndex(j, index, scrubberCount) - return - }
e.stopPropagation()} + onMouseDown={(e) => e.stopPropagation()} + onMouseEnter={() => setScrubHovering(k, true)} + onMouseLeave={() => { + setScrubHovering(k, false) + setScrubActiveIndex(k, undefined) + }} + > + setScrubActiveIndex(k, idx)} + onIndexClick={(index) => { + if (isSmall || opts?.disableInline) { + handleScrubberClickIndex(j, index, scrubberCount) + return + } - // Zielsekunde aus Scrubber ableiten - const seconds = - scrubberStepSeconds > 0 - ? index * scrubberStepSeconds - : 0 + const seconds = scrubberStepSeconds > 0 ? index * scrubberStepSeconds : 0 - // 1) bevorzugt: direkt inline an Position starten (falls Parent das unterstützt) - if (startInlineAt) { - startInlineAt(k, seconds, inlineDomId) + if (startInlineAt) { + startInlineAt(k, seconds, inlineDomId) + requestAnimationFrame(() => { + if (!tryAutoplayInline(inlineDomId)) { + requestAnimationFrame(() => tryAutoplayInline(inlineDomId)) + } + }) + return + } - // wie bei Tap im Mobile-Stack: Autoplay nochmal anschubsen + startInline(k) requestAnimationFrame(() => { if (!tryAutoplayInline(inlineDomId)) { - requestAnimationFrame(() => { - tryAutoplayInline(inlineDomId) - }) + requestAnimationFrame(() => tryAutoplayInline(inlineDomId)) } }) - return - } - - // 2) Fallback: inline normal starten (ohne exakten Seek) - startInline(k) - requestAnimationFrame(() => { - if (!tryAutoplayInline(inlineDomId)) { - requestAnimationFrame(() => { - tryAutoplayInline(inlineDomId) - }) - } - }) - - // 3) Optionaler Fallback auf bestehenden Handler (wenn du dort OpenPlayerAt machst) - // handleScrubberClickIndex(j, index, scrubberCount) - }} - stepSeconds={scrubberStepSeconds} - /> -
- ) : null} + }} + stepSeconds={scrubberStepSeconds} + /> +
+ ) : null}
diff --git a/frontend/src/components/ui/FinishedDownloadsGalleryView.tsx b/frontend/src/components/ui/FinishedDownloadsGalleryView.tsx index f163628..9665740 100644 --- a/frontend/src/components/ui/FinishedDownloadsGalleryView.tsx +++ b/frontend/src/components/ui/FinishedDownloadsGalleryView.tsx @@ -247,6 +247,8 @@ export default function FinishedDownloadsGalleryView({ // ✅ stashapp-artiger Hover-Scrubber-Zustand (pro Karte) const [scrubIndexByKey, setScrubIndexByKey] = React.useState>({}) + const [hoveredThumbKey, setHoveredThumbKey] = React.useState(null) + const setScrubIndexForKey = React.useCallback((key: string, index: number | undefined) => { setScrubIndexByKey((prev) => { if (index === undefined) { @@ -464,8 +466,12 @@ export default function FinishedDownloadsGalleryView({
onHoverPreviewKeyChange?.(k)} + onMouseEnter={() => { + setHoveredThumbKey(k) + onHoverPreviewKeyChange?.(k) + }} onMouseLeave={() => { + setHoveredThumbKey((prev) => (prev === k ? null : prev)) onHoverPreviewKeyChange?.(null) clearScrubIndex(k) setHoveredModelPreviewKey((prev) => (prev === k ? null : prev)) @@ -493,8 +499,6 @@ export default function FinishedDownloadsGalleryView({ } animatedMode="teaser" animatedTrigger="always" - clipSeconds={1} - thumbSamples={18} muted={previewMuted} popoverMuted={previewMuted} scrubProgressRatio={scrubProgressRatio} @@ -542,7 +546,7 @@ export default function FinishedDownloadsGalleryView({ ) : null} {/* ✅ stashapp-artiger Hover-Scrubber (UI-only) */} - {hasScrubber ? ( + {hasScrubber && hoveredThumbKey === k ? (
e.stopPropagation()} @@ -554,10 +558,7 @@ export default function FinishedDownloadsGalleryView({ activeIndex={activeScrubIndex} onActiveIndexChange={(idx) => setScrubIndexForKey(k, idx)} onIndexClick={(index) => { - // optional: UI-Zustand direkt sichtbar halten setScrubIndexForKey(k, index) - - // bestehender Handler (Parent entscheidet: openPlayerAt / modal / etc.) handleScrubberClickIndex(j, index, scrubberCount) }} stepSeconds={scrubberStepSeconds} diff --git a/frontend/src/components/ui/FinishedVideoPreview.tsx b/frontend/src/components/ui/FinishedVideoPreview.tsx index 12273ac..63bcbcb 100644 --- a/frontend/src/components/ui/FinishedVideoPreview.tsx +++ b/frontend/src/components/ui/FinishedVideoPreview.tsx @@ -81,6 +81,10 @@ export type FinishedVideoPreviewProps = { preferScrubProgress?: boolean } +function baseName(path: string) { + return (path || '').split(/[\\/]/).pop() || '' +} + export default function FinishedVideoPreview({ job, getFileName, @@ -121,7 +125,7 @@ export default function FinishedVideoPreview({ scrubProgressRatio, preferScrubProgress = false, }: FinishedVideoPreviewProps) { - const file = getFileName(job.output || '') + const file = baseName(job.output || '') || getFileName(job.output || '') const blurCls = blur ? 'blur-md' : '' // ✅ meta robust normalisieren (job.meta kann string sein) @@ -356,7 +360,7 @@ export default function FinishedVideoPreview({ const stripHot = (s: string) => (s.startsWith('HOT ') ? 
s.slice(4) : s) const previewId = useMemo(() => { - const f = getFileName(job.output || '') + const f = baseName(job.output || '') || getFileName(job.output || '') if (!f) return '' const base = f.replace(/\.[^.]+$/, '') // ext weg return stripHot(base).trim() diff --git a/frontend/src/components/ui/LiveHlsVideo.tsx b/frontend/src/components/ui/LiveHlsVideo.tsx index 87b3a82..38bcd70 100644 --- a/frontend/src/components/ui/LiveHlsVideo.tsx +++ b/frontend/src/components/ui/LiveHlsVideo.tsx @@ -1,3 +1,5 @@ +// frontend\src\components\ui\LiveHlsVideo.tsx + 'use client' import { useEffect, useMemo, useRef, useState } from 'react' @@ -28,6 +30,8 @@ export default function LiveHlsVideo({ // ✅ manifestUrl ist stabil pro reloadKey const manifestUrl = useMemo(() => withNonce(src, reloadKey), [src, reloadKey]) + const lastReloadAtRef = useRef(0) + useEffect(() => { let cancelled = false let hls: Hls | null = null @@ -52,8 +56,13 @@ export default function LiveHlsVideo({ const hardReload = () => { if (cancelled) return + + const now = Date.now() + // ✅ verhindert Reload-Stürme (z.B. 
wenn hls.js kurz zickt) + if (now - lastReloadAtRef.current < 4000) return + lastReloadAtRef.current = now + cleanupTimers() - // ✅ Effect neu starten setReloadKey((x) => x + 1) } @@ -74,7 +83,7 @@ export default function LiveHlsVideo({ if (txt.includes('#EXTINF')) return { ok: true } } } catch {} - await new Promise((res) => setTimeout(res, 500)) + await new Promise((res) => setTimeout(res, 1200)) } // kein reason => "noch nicht ready" @@ -108,7 +117,7 @@ export default function LiveHlsVideo({ video.src = manifestUrl video.load() - video.play().catch(() => {}) + video.play().catch((e) => console.debug('[LiveHlsVideo] play() failed', e)) // ---- Stall Handling (native) ---- let lastProgressTs = Date.now() @@ -154,9 +163,31 @@ export default function LiveHlsVideo({ } hls = new Hls({ - lowLatencyMode: true, - liveSyncDurationCount: 2, - maxBufferLength: 8, + lowLatencyMode: false, + + // ✅ Live: nicht super-aggressiv hinterherlaufen + liveSyncDurationCount: 3, + liveMaxLatencyDurationCount: 10, + + // Buffer + maxBufferLength: 12, + backBufferLength: 30, + + // ✅ Netzwerk-Retry-Backoff (verhindert Request-Stürme) + manifestLoadingTimeOut: 8000, + manifestLoadingMaxRetry: 6, + manifestLoadingRetryDelay: 1000, + manifestLoadingMaxRetryTimeout: 8000, + + levelLoadingTimeOut: 8000, + levelLoadingMaxRetry: 6, + levelLoadingRetryDelay: 1000, + levelLoadingMaxRetryTimeout: 8000, + + fragLoadingTimeOut: 8000, + fragLoadingMaxRetry: 6, + fragLoadingRetryDelay: 1000, + fragLoadingMaxRetryTimeout: 8000, }) hls.on(Hls.Events.ERROR, (_evt, data) => { @@ -180,7 +211,7 @@ export default function LiveHlsVideo({ hls.attachMedia(video) hls.on(Hls.Events.MANIFEST_PARSED, () => { - video.play().catch(() => {}) + video.play().catch((e) => console.debug('[LiveHlsVideo] play() failed', e)) }) } diff --git a/frontend/src/components/ui/ModelDetails.tsx b/frontend/src/components/ui/ModelDetails.tsx index 1277100..d4fafbf 100644 --- a/frontend/src/components/ui/ModelDetails.tsx +++ 
b/frontend/src/components/ui/ModelDetails.tsx @@ -32,12 +32,72 @@ import { StarIcon as StarSolidIcon, EyeIcon as EyeSolidIcon, } from '@heroicons/react/24/solid' +import { useMediaQuery } from '../../lib/useMediaQuery' +import FinishedVideoPreview from './FinishedVideoPreview' +import TagOverflowRow from './TagOverflowRow' +import PreviewScrubber from './PreviewScrubber' +import { formatResolution } from './formatters' +import Pagination from './Pagination' +import LiveHlsVideo from './LiveHlsVideo' function cn(...parts: Array) { return parts.filter(Boolean).join(' ') } -// ------ helpers ------ +function isRunningJob(job: RecordJob): boolean { + const s = String((job as any)?.status ?? '').toLowerCase() + const ended = Boolean((job as any)?.endedAt ?? (job as any)?.completedAt) + return !ended && (s === 'running' || s === 'postwork') +} + +const MD_CACHE_TTL_MS = 10 * 60 * 1000 // 10 min + +type OnlineCacheEntry = { + at: number + room: ChaturbateRoom | null + meta: Pick | null +} + +type BioCacheEntry = { + at: number + bio: BioContext | null + meta: Pick | null +} + +// In-Memory (über Modal-Open/Close hinweg, solange Tab offen) +const mdOnlineMem = new Map() +const mdBioMem = new Map() + +function isFresh(at: number) { + return Number.isFinite(at) && Date.now() - at <= MD_CACHE_TTL_MS +} + +function ssGet(key: string): T | null { + try { + if (typeof window === 'undefined') return null + const raw = window.sessionStorage.getItem(key) + if (!raw) return null + return JSON.parse(raw) as T + } catch { + return null + } +} + +function ssSet(key: string, value: any) { + try { + if (typeof window === 'undefined') return + window.sessionStorage.setItem(key, JSON.stringify(value)) + } catch { + // ignore + } +} + +function ssKeyOnline(modelKey: string) { + return `md:cb:online:${modelKey}` +} +function ssKeyBio(modelKey: string) { + return `md:cb:bio:${modelKey}` +} const nf = new Intl.NumberFormat('de-DE') @@ -205,6 +265,71 @@ function endedLabel(job: RecordJob) { 
return ended ? shortDate(ended as any) : '—' } +function firstNonEmptyString(...values: unknown[]): string | undefined { + for (const v of values) { + if (typeof v === 'string') { + const s = v.trim() + if (s) return s + } + } + return undefined +} + +function parseJobMeta(metaRaw: unknown): any | null { + if (!metaRaw) return null + if (typeof metaRaw === 'string') { + try { + return JSON.parse(metaRaw) + } catch { + return null + } + } + if (typeof metaRaw === 'object') return metaRaw + return null +} + +function normalizeDurationSeconds(value: unknown): number | undefined { + if (typeof value !== 'number' || !Number.isFinite(value) || value <= 0) return undefined + // ms -> s Heuristik wie in FinishedVideoPreview + return value > 24 * 60 * 60 ? value / 1000 : value +} + +function clamp(n: number, min: number, max: number) { + return Math.max(min, Math.min(max, n)) +} + +const DEFAULT_SPRITE_STEP_SECONDS = 5 + +function chooseSpriteGrid(count: number): [number, number] { + if (count <= 1) return [1, 1] + + const targetRatio = 16 / 9 + let bestCols = 1 + let bestRows = count + let bestWaste = Number.POSITIVE_INFINITY + let bestRatioScore = Number.POSITIVE_INFINITY + + for (let c = 1; c <= count; c++) { + const r = Math.max(1, Math.ceil(count / c)) + const waste = c * r - count + const ratio = c / r + const ratioScore = Math.abs(ratio - targetRatio) + + if ( + waste < bestWaste || + (waste === bestWaste && ratioScore < bestRatioScore) || + (waste === bestWaste && ratioScore === bestRatioScore && r < bestRows) + ) { + bestWaste = waste + bestRatioScore = ratioScore + bestCols = c + bestRows = r + } + } + + return [bestCols, bestRows] +} + // ------ API types (Chaturbate online) ------ type ChaturbateRoom = { @@ -327,11 +452,12 @@ type Props = { open: boolean modelKey: string | null onClose: () => void - onOpenPlayer?: (job: RecordJob) => void + onOpenPlayer?: (job: RecordJob, startAtSec?: number) => void cookies?: Record runningJobs?: RecordJob[] blurPreviews?: 
boolean - + teaserPlayback?: 'still' | 'hover' | 'all' + teaserAudio?: boolean onToggleWatch?: (job: RecordJob) => void | Promise onToggleFavorite?: (job: RecordJob) => void | Promise onToggleLike?: (job: RecordJob) => void | Promise @@ -342,6 +468,8 @@ type Props = { | { ok?: boolean; oldFile?: string; newFile?: string } | Promise onDelete?: (job: RecordJob) => void | Promise + onKeep?: (job: RecordJob) => void | Promise + onStopJob?: (id: string) => void | Promise } function normalizeModelKey(raw: string | null | undefined): string { @@ -397,13 +525,18 @@ export default function ModelDetails({ cookies, runningJobs, blurPreviews, - + teaserPlayback = 'hover', + teaserAudio = false, onToggleWatch, onToggleFavorite, onToggleLike, onToggleHot, onDelete, + onKeep, + onStopJob }: Props) { + + const isDesktop = useMediaQuery('(min-width: 640px)') const [models, setModels] = React.useState([]) const [, setModelsLoading] = React.useState(false) @@ -425,13 +558,71 @@ export default function ModelDetails({ const [imgViewer, setImgViewer] = React.useState<{ src: string; alt?: string } | null>(null) + const [runningHover, setRunningHover] = React.useState(false) + + const [stopPending, setStopPending] = React.useState(false) + const [doneTotalCount, setDoneTotalCount] = React.useState(0) const [donePage, setDonePage] = React.useState(1) - const DONE_PAGE_SIZE = 25 + const DONE_PAGE_SIZE = 4 const key = normalizeModelKey(modelKey) + // ===== Gallery UI State (wie FinishedDownloadsGalleryView) ===== + const [durations, setDurations] = React.useState>({}) + const [hoverTeaserKey, setHoverTeaserKey] = React.useState(null) + const [teaserKey, setTeaserKey] = React.useState(null) + + const [hoveredModelPreviewKey, setHoveredModelPreviewKey] = React.useState(null) + const [scrubIndexByKey, setScrubIndexByKey] = React.useState>({}) + + const [hoveredThumbKey, setHoveredThumbKey] = React.useState(null) + + const lower = React.useCallback((s: string) => String(s ?? 
'').toLowerCase(), []) + + const deletingKeys = React.useMemo(() => new Set(), []) + const keepingKeys = React.useMemo(() => new Set(), []) + const removingKeys = React.useMemo(() => new Set(), []) + const deletedKeys = React.useMemo(() => new Set(), []) + + // ✅ 1) Beim Öffnen sofort aus Cache rendern + React.useEffect(() => { + if (!open || !key) return + + // Online + const memOnline = mdOnlineMem.get(key) + const ssOnline = ssGet(ssKeyOnline(key)) + + const onlineHit = + (memOnline && isFresh(memOnline.at) ? memOnline : null) || + (ssOnline && isFresh(ssOnline.at) ? ssOnline : null) + + if (onlineHit) { + setRoom(onlineHit.room ?? null) + setRoomMeta(onlineHit.meta ?? null) + } + + // Bio + const memBio = mdBioMem.get(key) + const ssBio = ssGet(ssKeyBio(key)) + + const bioHit = + (memBio && isFresh(memBio.at) ? memBio : null) || + (ssBio && isFresh(ssBio.at) ? ssBio : null) + + if (bioHit) { + setBio(bioHit.bio ?? null) + setBioMeta(bioHit.meta ?? null) + // bioLoading NICHT anfassen – Fetch kann trotzdem laufen + } + }, [open, key]) + + React.useEffect(() => { + if (!open) return + setStopPending(false) + }, [open, key]) + const refetchModels = React.useCallback(async () => { try { const r = await fetch('/api/models', { cache: 'no-store' }) @@ -520,53 +711,62 @@ export default function ModelDetails({ } }, [open]) - // Chaturbate online room info + // ✅ Online: nur einmalig laden (kein Polling) React.useEffect(() => { if (!open || !key) return - let alive = true - let timer: any = null - let inFlight: AbortController | null = null + // wenn wir frische Daten aus Cache haben -> keinen Request + const mem = mdOnlineMem.get(key) + const ss = ssGet(ssKeyOnline(key)) + const hit = + (mem && isFresh(mem.at) ? mem : null) || + (ss && isFresh(ss.at) ? 
ss : null) - const run = async () => { + if (hit) return + + let alive = true + const ac = new AbortController() + + ;(async () => { try { - inFlight?.abort() - inFlight = new AbortController() + const cookieHeader = buildChaturbateCookieHeader(cookies) const r = await fetch('/api/chaturbate/online', { method: 'POST', - headers: { 'Content-Type': 'application/json' }, + headers: { + 'Content-Type': 'application/json', + ...(cookieHeader ? { 'X-Chaturbate-Cookie': cookieHeader } : {}), + }, cache: 'no-store', - signal: inFlight.signal, + signal: ac.signal, body: JSON.stringify({ q: [key], show: [], refresh: false }), }) const data = (await r.json().catch(() => null)) as OnlineResp if (!alive) return - setRoomMeta({ enabled: data?.enabled, fetchedAt: data?.fetchedAt, lastError: data?.lastError }) + const meta = { enabled: data?.enabled, fetchedAt: data?.fetchedAt, lastError: data?.lastError } const rooms = Array.isArray(data?.rooms) ? data.rooms : [] - setRoom(rooms[0] ?? null) + const nextRoom = rooms[0] ?? null + + setRoomMeta(meta) + setRoom(nextRoom) + + const entry: OnlineCacheEntry = { at: Date.now(), room: nextRoom, meta } + mdOnlineMem.set(key, entry) + ssSet(ssKeyOnline(key), entry) } catch (e: any) { if (e?.name === 'AbortError') return if (!alive) return setRoomMeta({ enabled: undefined, fetchedAt: undefined, lastError: 'Fetch fehlgeschlagen' }) - setRoom(null) } - } - - // sofort - void run() - - // alle 8s (wie Preset) - timer = window.setInterval(run, 8000) + })() return () => { alive = false - if (timer) window.clearInterval(timer) - inFlight?.abort() + ac.abort() } - }, [open, key]) + }, [open, key, cookies]) // ✅ NEW: BioContext (proxy) React.useEffect(() => { @@ -598,11 +798,20 @@ export default function ModelDetails({ if (!alive) return setBioMeta({ enabled: data?.enabled, fetchedAt: data?.fetchedAt, lastError: data?.lastError }) setBio((data?.bio as BioContext) ?? 
null) + + const meta = { enabled: data?.enabled, fetchedAt: data?.fetchedAt, lastError: data?.lastError } + const nextBio = (data?.bio as BioContext) ?? null + + setBioMeta(meta) + setBio(nextBio) + + const entry: BioCacheEntry = { at: Date.now(), bio: nextBio, meta } + mdBioMem.set(key, entry) + ssSet(ssKeyBio(key), entry) }) .catch((e) => { if (!alive) return setBioMeta({ enabled: undefined, fetchedAt: undefined, lastError: e?.message || 'Fetch fehlgeschlagen' }) - setBio(null) }) .finally(() => { if (!alive) return @@ -667,6 +876,9 @@ export default function ModelDetails({ }) }, [runningList, key]) + // ✅ Running-Hero: wenn es einen laufenden Job für dieses Model gibt, nimm dessen Preview + const runningHeroJob = runningMatches.length ? runningMatches[0] : null + const allTags = React.useMemo(() => { const a = splitTags(model?.tags) const b = Array.isArray(room?.tags) ? room!.tags : [] @@ -728,6 +940,29 @@ export default function ModelDetails({
) + const keyFor = React.useCallback((j: RecordJob) => { + // stabil, auch wenn id fehlt + const id = String((j as any)?.id ?? '') + const out = String(j.output ?? '') + return id ? `${id}::${out}` : out + }, []) + + const addToSet = (setState: React.Dispatch>>, k: string) => + setState((prev) => { + if (prev.has(k)) return prev + const next = new Set(prev) + next.add(k) + return next + }) + + const delFromSet = (setState: React.Dispatch>>, k: string) => + setState((prev) => { + if (!prev.has(k)) return prev + const next = new Set(prev) + next.delete(k) + return next + }) + const handleToggleHot = React.useCallback( async (job: RecordJob) => { const out = job.output || '' @@ -760,27 +995,20 @@ export default function ModelDetails({ [onToggleHot, refetchDone] ) - const handleDelete = React.useCallback( - async (job: RecordJob) => { - const out = job.output || '' - const file = baseName(out) + const handleScrubberClickIndex = React.useCallback( + (job: RecordJob, segmentIndex: number, _segmentCount: number) => { + const metaRaw = (job as any)?.meta + const meta = typeof metaRaw === 'string' ? (() => { try { return JSON.parse(metaRaw) } catch { return null } })() : metaRaw - // ✅ UI sofort: raus aus Liste - if (file) { - setDone((prev) => - prev.filter((j) => { - const same = - (j.id && job.id && j.id === job.id) || - (j.output && job.output && j.output === job.output) - return !same - }) - ) - } + const step = + typeof meta?.previewSprite?.stepSeconds === 'number' && Number.isFinite(meta.previewSprite.stepSeconds) && meta.previewSprite.stepSeconds > 0 + ? 
meta.previewSprite.stepSeconds + : 5 - await onDelete?.(job) - refetchDone() + const startAtSec = Math.max(0, Math.floor(segmentIndex) * step) + onOpenPlayer?.(job, startAtSec) }, - [onDelete, refetchDone] + [onOpenPlayer] ) const handleToggleFavoriteModel = React.useCallback(async () => { @@ -837,6 +1065,82 @@ export default function ModelDetails({ refetchModels() }, [key, onToggleWatch, refetchModels]) + React.useEffect(() => { + if (!open) { + setHoverTeaserKey(null) + setTeaserKey(null) + setHoveredModelPreviewKey(null) + setHoveredThumbKey(null) + setScrubIndexByKey({}) + setDurations({}) + } + }, [open, key]) + + const handleDuration = React.useCallback( + (job: RecordJob, seconds: number) => { + const k = keyFor(job) + setDurations((prev) => (prev[k] === seconds ? prev : { ...prev, [k]: seconds })) + }, + [keyFor] + ) + + const handleHoverPreviewKeyChange = React.useCallback( + (k: string | null) => { + setHoverTeaserKey(k) + if (teaserPlayback === 'hover') setTeaserKey(k) + }, + [teaserPlayback] + ) + + const runtimeOf = React.useCallback( + (job: RecordJob) => { + const k = keyFor(job) + const raw = + (job as any)?.durationSeconds ?? + (job as any)?.meta?.durationSeconds ?? + durations[k] + + const n = + typeof raw === 'number' && Number.isFinite(raw) + ? raw > 24 * 60 * 60 + ? raw / 1000 + : raw + : null + + return fmtHms(n) + }, + [durations, keyFor] + ) + + const sizeBytesOf = React.useCallback((job: RecordJob) => { + const v = + (job as any)?.sizeBytes ?? + (job as any)?.size ?? + (job as any)?.meta?.sizeBytes ?? + (job as any)?.meta?.size + return typeof v === 'number' && Number.isFinite(v) ? v : null + }, []) + + const formatBytes = React.useCallback((bytes?: number | null) => fmtBytes(bytes ?? 
null), []) + + const setScrubIndexForKey = React.useCallback((key: string, index: number | undefined) => { + setScrubIndexByKey((prev) => { + if (index === undefined) { + if (!(key in prev)) return prev + const next = { ...prev } + delete next[key] + return next + } + if (prev[key] === index) return prev + return { ...prev, [key]: index } + }) + }, []) + + const clearScrubIndex = React.useCallback( + (key: string) => setScrubIndexForKey(key, undefined), + [setScrubIndexForKey] + ) + type TabKey = 'info' | 'downloads' | 'running' const [tab, setTab] = React.useState('info') @@ -851,7 +1155,19 @@ export default function ModelDetails({ { id: 'running', label: 'Running', count: runningMatches.length ? fmtInt(runningMatches.length) : undefined, disabled: runningLoading }, ] - + // ✅ Adapter: RecordJobActions erwartet void|boolean. + // Dein onToggleHot darf ein Objekt zurückgeben -> wir droppen das. + const onToggleHotAction = React.useCallback( + async (job: RecordJob): Promise => { + try { + await onToggleHot?.(job) + return true + } catch { + return false + } + }, + [onToggleHot] + ) return ( heroImgFull && openImage(heroImgFull, titleName)} + onClick={() => (heroImgFull ? openImage(heroImgFull, titleName) : undefined)} aria-label="Bild vergrößern" > {heroImg ? ( @@ -1823,28 +2139,20 @@ export default function ModelDetails({
-
- - - - Seite {donePage} / {doneTotalPages} - - - +
+ setDonePage(p)} + siblingCount={1} + boundaryCount={1} + showSummary={true} + prevLabel="ZurĂĽck" + nextLabel="Weiter" + ariaLabel="Downloads Seiten" + className="border-0 bg-transparent px-0 py-0 dark:bg-transparent" + />
@@ -1856,105 +2164,330 @@ export default function ModelDetails({ Keine abgeschlossenen Downloads fĂĽr dieses Model gefunden. ) : ( -
+
{doneMatches.map((j) => { + const k = keyFor(j) + + const allowSound = Boolean(teaserAudio) && hoverTeaserKey === k + const previewMuted = !allowSound + const fileRaw = baseName(j.output || '') const isHot = isHotName(fileRaw) - const file = niceFileLabel(fileRaw) - const dur = fmtHms((j as any).durationSeconds) - const size = fmtBytes((j as any).sizeBytes) - const ended = endedLabel(j) + const file = stripHotPrefix(fileRaw) - // busy-state optional (wenn du sowas willst) - // const k = `${j.id}-${fileRaw}` - // const busy = false + const dur = runtimeOf(j) + const size = formatBytes(sizeBytesOf(j)) + + // Auflösung bevorzugt aus meta/videoWidth/videoHeight + const meta = parseJobMeta((j as any)?.meta) + const resObj = + typeof meta?.videoWidth === 'number' && + typeof meta?.videoHeight === 'number' && + meta.videoWidth > 0 && + meta.videoHeight > 0 + ? { w: meta.videoWidth, h: meta.videoHeight } + : typeof (j as any)?.videoWidth === 'number' && + typeof (j as any)?.videoHeight === 'number' && + (j as any).videoWidth > 0 && + (j as any).videoHeight > 0 + ? 
{ w: (j as any).videoWidth, h: (j as any).videoHeight } + : null + + const resLabel = formatResolution(resObj) + + const busy = deletingKeys.has(k) || keepingKeys.has(k) || removingKeys.has(k) + const deleted = deletedKeys.has(k) + + // Model “Flags” (hier: aktuelles Model) + const isFav = Boolean(model?.favorite) + const isLiked = model?.liked === true + const isWatching = Boolean(model?.watching) + + // Tags: nimm stored + room-tags (wie links), deduped + const cardTags = allTags + + // Model Preview Bild: nimm Hero + const modelImageSrc = firstNonEmptyString(heroImgFull, heroImg) + + // Preview-ID (für sprite fallback) + const fileForPreviewId = stripHotPrefix(baseName(j.output || '')) + const previewId = fileForPreviewId.replace(/\.[^.]+$/, '').trim() + + // -------- Sprite/Scrubber Setup (wie Gallery) -------- + const spritePathRaw = firstNonEmptyString( + meta?.previewSprite?.path, + (meta as any)?.previewSpritePath, + previewId ? `/api/preview-sprite/${encodeURIComponent(previewId)}` : undefined + ) + const spritePath = spritePathRaw ? spritePathRaw.replace(/\/+$/, '') : undefined + + const spriteStepSecondsRaw = meta?.previewSprite?.stepSeconds ?? (meta as any)?.previewSpriteStepSeconds + const spriteStepSeconds = + typeof spriteStepSecondsRaw === 'number' && Number.isFinite(spriteStepSecondsRaw) && spriteStepSecondsRaw > 0 + ? spriteStepSecondsRaw + : DEFAULT_SPRITE_STEP_SECONDS + + const durationForSprite = + normalizeDurationSeconds(meta?.durationSeconds) ?? + normalizeDurationSeconds((j as any)?.durationSeconds) ?? + normalizeDurationSeconds(durations[k]) + + const inferredSpriteCountFromDuration = + typeof durationForSprite === 'number' && durationForSprite > 0 + ? Math.max(1, Math.min(200, Math.floor(durationForSprite / spriteStepSeconds) + 1)) + : undefined + + const spriteCountRaw = + meta?.previewSprite?.count ?? + (meta as any)?.previewSpriteCount ?? + inferredSpriteCountFromDuration + + const spriteColsRaw = meta?.previewSprite?.cols ?? 
(meta as any)?.previewSpriteCols + const spriteRowsRaw = meta?.previewSprite?.rows ?? (meta as any)?.previewSpriteRows + + const spriteCount = + typeof spriteCountRaw === 'number' && Number.isFinite(spriteCountRaw) ? Math.max(0, Math.floor(spriteCountRaw)) : 0 + + const [inferredCols, inferredRows] = spriteCount > 1 ? chooseSpriteGrid(spriteCount) : [0, 0] + + const spriteCols = + typeof spriteColsRaw === 'number' && Number.isFinite(spriteColsRaw) ? Math.max(0, Math.floor(spriteColsRaw)) : inferredCols + + const spriteRows = + typeof spriteRowsRaw === 'number' && Number.isFinite(spriteRowsRaw) ? Math.max(0, Math.floor(spriteRowsRaw)) : inferredRows + + const spriteVersion = + (typeof meta?.updatedAtUnix === 'number' && Number.isFinite(meta.updatedAtUnix) ? meta.updatedAtUnix : undefined) ?? + (typeof (meta as any)?.fileModUnix === 'number' && Number.isFinite((meta as any).fileModUnix) ? (meta as any).fileModUnix : undefined) ?? + 0 + + const spriteUrl = spritePath && spriteVersion ? `${spritePath}?v=${encodeURIComponent(String(spriteVersion))}` : spritePath || undefined + + const hasScrubberUi = Boolean(spriteUrl) && spriteCount > 1 + const hasSpriteScrubber = hasScrubberUi && spriteCols > 0 && spriteRows > 0 + + const scrubberCount = hasScrubberUi ? spriteCount : 0 + const scrubberStepSeconds = hasScrubberUi ? spriteStepSeconds : 0 + const hasScrubber = hasScrubberUi + + const activeScrubIndex = scrubIndexByKey[k] + const scrubProgressRatio = + typeof activeScrubIndex === 'number' && scrubberCount > 1 ? clamp(activeScrubIndex / (scrubberCount - 1), 0, 1) : undefined + + const spriteFrameStyle: React.CSSProperties | undefined = + hasSpriteScrubber && typeof activeScrubIndex === 'number' + ? (() => { + const idx = clamp(activeScrubIndex, 0, Math.max(0, spriteCount - 1)) + const col = idx % spriteCols + const row = Math.floor(idx / spriteCols) + + const posX = spriteCols <= 1 ? 0 : (col / (spriteCols - 1)) * 100 + const posY = spriteRows <= 1 ? 
0 : (row / (spriteRows - 1)) * 100 + + return { + backgroundImage: `url("${spriteUrl}")`, + backgroundRepeat: 'no-repeat', + backgroundSize: `${spriteCols * 100}% ${spriteRows * 100}%`, + backgroundPosition: `${posX}% ${posY}%`, + } + })() + : undefined + + const showModelPreviewInThumb = hoveredModelPreviewKey === k && Boolean(modelImageSrc) + const showScrubberSpriteInThumb = !showModelPreviewInThumb && Boolean(spriteFrameStyle) + const hideTeaserUnderOverlay = showModelPreviewInThumb || showScrubberSpriteInThumb + + if (deleted) return null return (
onOpenPlayer?.(j)} onKeyDown={(e) => { - if (!onOpenPlayer) return - if (e.key === 'Enter' || e.key === ' ') onOpenPlayer(j) + if (e.key === 'Enter' || e.key === ' ') onOpenPlayer?.(j) }} > - {/* "Preview" area (placeholder) */} -
- {/* Placeholder background */} -
+ {/* Thumb */} +
{ + setHoveredThumbKey(k) + handleHoverPreviewKeyChange(k) + }} + onMouseLeave={() => { + setHoveredThumbKey(null) + handleHoverPreviewKeyChange(null) + clearScrubIndex(k) + setHoveredModelPreviewKey((prev) => (prev === k ? null : prev)) + }} + > +
+
+ stripHotPrefix(baseName(p))} + durationSeconds={durations[k] ?? (j as any)?.durationSeconds} + onDuration={handleDuration} + variant="fill" + showPopover={false} + blur={blurPreviews} + animated={ + hideTeaserUnderOverlay + ? false + : teaserPlayback === 'all' + ? true + : teaserPlayback === 'hover' + ? teaserKey === k + : false + } + animatedMode="teaser" + animatedTrigger="always" + muted={previewMuted} + popoverMuted={previewMuted} + scrubProgressRatio={scrubProgressRatio} + preferScrubProgress={typeof activeScrubIndex === 'number'} + /> +
- {/* subtle blur effect option (if you want it to match) */} -
+ {/* Sprite preload */} + {hasSpriteScrubber && spriteUrl ? ( + + ) : null} - {/* Actions overlay (top-right) */} -
e.stopPropagation()} - onMouseDown={(e) => e.stopPropagation()} - > - -
+ {/* Sprite overlay frame */} + {showScrubberSpriteInThumb && spriteFrameStyle ? ( +