This commit is contained in:
Linrador 2026-03-03 21:14:39 +01:00
parent 4d69c90722
commit a0a869c5a5
36 changed files with 5159 additions and 5051 deletions

View File

@ -201,10 +201,21 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU
return out, nil return out, nil
} }
// 🔒 Schutz gegen Race: sehr frische Dateien sind evtl. noch nicht finalisiert/kopiert // 🔒 Schutz gegen Race: sehr frische Dateien sind evtl. noch nicht finalisiert/kopiert.
// (typisch: moov atom fehlt noch) // Statt direkt zu skippen: kurz warten und dann weitermachen (sonst gibt es keinen Retry).
if time.Since(fi.ModTime()) < 10*time.Second { if age := time.Since(fi.ModTime()); age < 10*time.Second {
return out, nil wait := 10*time.Second - age
// nicht ewig blocken, respektiere ctx
if wait > 0 {
t := time.NewTimer(wait)
defer t.Stop()
select {
case <-t.C:
// weiter
case <-ctx.Done():
return out, ctx.Err()
}
}
} }
id := assetIDFromVideoPath(videoPath) id := assetIDFromVideoPath(videoPath)
@ -295,16 +306,18 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU
progress(0.10) progress(0.10)
t := 0.0 // ✅ Immer letztes Frame bevorzugen (Preview soll “Endzustand” zeigen)
if meta.durSec > 0 { img, e1 := extractLastFrameWebP(videoPath)
t = meta.durSec * 0.5
}
progress(0.15)
img, e1 := extractFrameAtTimeWebP(videoPath, t)
if e1 != nil || len(img) == 0 { if e1 != nil || len(img) == 0 {
img, e1 = extractLastFrameWebP(videoPath) // Fallback: wenn wir Duration kennen, versuche kurz vor Ende
if meta.durSec > 0 {
t := meta.durSec - 0.25
if t < 0 {
t = 0
}
img, e1 = extractFrameAtTimeWebP(videoPath, t)
}
// Letzter Fallback: erstes Frame
if e1 != nil || len(img) == 0 { if e1 != nil || len(img) == 0 {
img, e1 = extractFirstFrameWebPScaled(videoPath, 720, 75) img, e1 = extractFirstFrameWebPScaled(videoPath, 720, 75)
} }

View File

@ -1,4 +1,4 @@
// backend\generate_sprite.go // backend\assets_sprite.go
package main package main

View File

@ -9,6 +9,7 @@ import (
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
"path/filepath"
"sort" "sort"
"strconv" "strconv"
"strings" "strings"
@ -91,6 +92,25 @@ var (
cbRefreshInFlight bool cbRefreshInFlight bool
) )
// --- HLS refresh throttling (damit /online nicht zu teuer wird) ---
var cbHlsRefreshMu sync.Mutex
var cbHlsRefreshAt = map[string]time.Time{} // key=userLower -> last refresh time
func shouldRefreshHLS(userLower string, minInterval time.Duration) bool {
if userLower == "" {
return false
}
cbHlsRefreshMu.Lock()
defer cbHlsRefreshMu.Unlock()
last := cbHlsRefreshAt[userLower]
if !last.IsZero() && time.Since(last) < minInterval {
return false
}
cbHlsRefreshAt[userLower] = time.Now()
return true
}
func normalizeList(in []string) []string { func normalizeList(in []string) []string {
seen := map[string]bool{} seen := map[string]bool{}
out := make([]string, 0, len(in)) out := make([]string, 0, len(in))
@ -514,6 +534,109 @@ func hashKey(parts ...string) string {
return hex.EncodeToString(h.Sum(nil)) return hex.EncodeToString(h.Sum(nil))
} }
// jobMatchesUser reports whether a running job belongs to the given username.
// (Matching is done via SourceURL and the output path — robust enough without
// depending on modelNameFromFilename.)
//
// NOTE(review): the substring checks below can false-positive when one
// username is a prefix/suffix of another (e.g. "ann" vs "anna") — confirm
// this is acceptable for all callers.
func jobMatchesUser(j *RecordJob, userLower string) bool {
	if j == nil {
		return false
	}
	u := strings.ToLower(strings.TrimSpace(userLower))
	if u == "" {
		return false
	}
	// 1) SourceURL usually contains /<username>
	if s := strings.ToLower(strings.TrimSpace(j.SourceURL)); s != "" {
		if strings.Contains(s, "/"+u) || strings.HasSuffix(s, "/"+u) || strings.HasSuffix(s, u) {
			return true
		}
	}
	// 2) The output path typically carries the model key in the file or
	//    parent-directory name.
	if out := strings.ToLower(strings.TrimSpace(j.Output)); out != "" {
		base := strings.ToLower(strings.TrimSpace(filepath.Base(out)))
		if strings.Contains(base, u) {
			return true
		}
		dir := strings.ToLower(strings.TrimSpace(filepath.Base(filepath.Dir(out))))
		if dir == u {
			return true
		}
	}
	return false
}
// fetchCurrentBestHLS loads the model's room page, parses hls_source out of it
// and selects the best variant playlist. Returns the trimmed playlist URL.
func fetchCurrentBestHLS(ctx context.Context, username string, cookie string, userAgent string) (string, error) {
	name := strings.TrimSpace(username)
	if name == "" {
		return "", fmt.Errorf("empty username")
	}
	client := NewHTTPClient(userAgent)
	pageURL := "https://chaturbate.com/" + strings.Trim(name, "/")
	body, err := client.FetchPage(ctx, pageURL, cookie)
	if err != nil {
		return "", err
	}
	// master playlist (hls_source) from the page body
	master, err := ParseStream(body)
	if err != nil {
		return "", err
	}
	// pick the best variant playlist from the master
	variant, err := FetchPlaylist(ctx, client, master, cookie)
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(variant.PlaylistURL), nil
}
// refreshRunningJobsHLS aktualisiert PreviewM3U8 (+Cookie/UA) für passende laufende Jobs.
// Wenn die URL rotiert hat: stopPreview(job) damit ffmpeg neu startet.
func refreshRunningJobsHLS(userLower string, newHls string, cookie string, ua string) {
if strings.TrimSpace(userLower) == "" || strings.TrimSpace(newHls) == "" {
return
}
changedAny := false
jobsMu.Lock()
for _, j := range jobs {
if j == nil || j.Status != JobRunning {
continue
}
if !jobMatchesUser(j, userLower) {
continue
}
old := strings.TrimSpace(j.PreviewM3U8)
j.PreviewM3U8 = newHls
j.PreviewCookie = cookie
j.PreviewUA = ua
// Wenn ffmpeg schon läuft und sich Quelle geändert hat -> hart stoppen
if old != "" && old != newHls {
stopPreview(j)
// PreviewState zurücksetzen (damit "private/offline" nicht hängen bleibt)
j.PreviewState = ""
j.PreviewStateAt = ""
j.PreviewStateMsg = ""
}
changedAny = true
}
jobsMu.Unlock()
if changedAny {
notifyJobsChanged()
}
}
func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) { func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet && r.Method != http.MethodPost { if r.Method != http.MethodGet && r.Method != http.MethodPost {
http.Error(w, "Nur GET/POST erlaubt", http.StatusMethodNotAllowed) http.Error(w, "Nur GET/POST erlaubt", http.StatusMethodNotAllowed)
@ -522,6 +645,15 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
enabled := getSettings().UseChaturbateAPI enabled := getSettings().UseChaturbateAPI
// Optional: Cookie vom Frontend (für Cloudflare/session best effort)
cookieHeader := strings.TrimSpace(r.Header.Get("X-Chaturbate-Cookie"))
// UA vom Client (oder fallback)
reqUA := strings.TrimSpace(r.Header.Get("User-Agent"))
if reqUA == "" {
reqUA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"
}
// --------------------------- // ---------------------------
// Request params (GET/POST) // Request params (GET/POST)
// --------------------------- // ---------------------------
@ -717,6 +849,45 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
liteByUser := cb.LiteByUser liteByUser := cb.LiteByUser
cbMu.RUnlock() cbMu.RUnlock()
// ---------------------------
// ✅ HLS URL Refresh für laufende Jobs (best effort)
// Trigger nur, wenn explizite Users angefragt werden (dein Frontend macht das so)
// und nur wenn User gerade online ist.
// ---------------------------
if onlySpecificUsers && liteByUser != nil {
const hlsMinInterval = 12 * time.Second // throttle pro user
for _, u := range users {
rm, ok := liteByUser[u]
if !ok {
continue // offline -> nichts
}
// Optional: nur wenn wirklich "public" (reduziert unnötige fetches)
// Wenn du auch in "private" previewen willst, entferne diesen Block.
show := strings.ToLower(strings.TrimSpace(rm.CurrentShow))
if show == "offline" || show == "" {
continue
}
// throttle
if !shouldRefreshHLS(u, hlsMinInterval) {
continue
}
// HLS holen (kurzer Timeout soll /online nicht blockieren)
ctx, cancel := context.WithTimeout(r.Context(), 8*time.Second)
newHls, err := fetchCurrentBestHLS(ctx, rm.Username, cookieHeader, reqUA)
cancel()
if err != nil || strings.TrimSpace(newHls) == "" {
continue
}
// Jobs aktualisieren + ggf. Preview stoppen
refreshRunningJobsHLS(u, newHls, cookieHeader, reqUA)
}
}
// --------------------------- // ---------------------------
// Persist "last seen online/offline" für explizit angefragte User // Persist "last seen online/offline" für explizit angefragte User
// --------------------------- // ---------------------------

View File

@ -1,185 +0,0 @@
// backend\generated_gc.go
package main
import (
"fmt"
"io/fs"
"os"
"path/filepath"
"strings"
"sync/atomic"
"time"
)
var generatedGCRunning int32
type generatedGCStats struct {
Checked int
Removed int
}
// triggerGeneratedGarbageCollectorSync runs the GC synchronously and returns
// its counters (consumed by the /api/settings/cleanup response). At most one
// GC runs at a time; a concurrent call is skipped and reports zero stats.
func triggerGeneratedGarbageCollectorSync() generatedGCStats {
	// single-flight guard via CAS on the package-level flag
	if !atomic.CompareAndSwapInt32(&generatedGCRunning, 0, 1) {
		fmt.Println("🧹 [gc] skip: already running")
		return generatedGCStats{}
	}
	defer atomic.StoreInt32(&generatedGCRunning, 0)
	return runGeneratedGarbageCollector()
}
// startGeneratedGarbageCollector schedules a single GC pass shortly after
// server start; it removes orphaned entries under /generated.
func startGeneratedGarbageCollector() {
	// time.AfterFunc runs the callback in its own goroutine after the delay
	time.AfterFunc(3*time.Second, func() {
		triggerGeneratedGarbageCollectorSync()
	})
}
// Core logic without the startup delay (for manual triggers, e.g. after a
// cleanup run). Returns stats so /api/settings/cleanup can display numbers.
//
// The collector works in three phases:
//  1. collect "live" asset IDs from every non-empty .mp4/.ts file below the
//     done dir (recursive, skipping .trash),
//  2. remove /generated/meta/<id> directories whose id is no longer live,
//  3. remove legacy /generated/<id> directories (skipping reserved names).
func runGeneratedGarbageCollector() generatedGCStats {
	stats := generatedGCStats{}

	s := getSettings()
	doneAbs, err := resolvePathRelativeToApp(s.DoneDir)
	if err != nil {
		fmt.Println("🧹 [gc] resolve doneDir failed:", err)
		return stats
	}
	doneAbs = strings.TrimSpace(doneAbs)
	if doneAbs == "" {
		return stats
	}

	// 1) Collect live IDs: every mp4/ts below /done (recursive), ignore .trash.
	live := make(map[string]struct{}, 4096)
	_ = filepath.WalkDir(doneAbs, func(p string, d fs.DirEntry, err error) error {
		if err != nil {
			// best effort: skip unreadable entries instead of aborting the walk
			return nil
		}
		name := d.Name()
		if d.IsDir() {
			if strings.EqualFold(name, ".trash") {
				return fs.SkipDir
			}
			return nil
		}
		ext := strings.ToLower(filepath.Ext(name))
		if ext != ".mp4" && ext != ".ts" {
			return nil
		}
		info, err := d.Info()
		if err != nil || info.IsDir() || info.Size() <= 0 {
			// zero-byte files do not count as live recordings
			return nil
		}
		base := strings.TrimSuffix(name, ext)
		id, err := sanitizeID(stripHotPrefix(base))
		if err != nil || id == "" {
			return nil
		}
		live[id] = struct{}{}
		return nil
	})

	// 2) Check /generated/meta/<id> against the live set.
	metaRoot, err := generatedMetaRoot()
	if err == nil {
		metaRoot = strings.TrimSpace(metaRoot)
	}
	if err != nil || metaRoot == "" {
		return stats
	}
	removedMeta := 0
	checkedMeta := 0
	if entries, err := os.ReadDir(metaRoot); err == nil {
		for _, e := range entries {
			if !e.IsDir() {
				continue
			}
			id := strings.TrimSpace(e.Name())
			if id == "" || strings.HasPrefix(id, ".") {
				continue
			}
			checkedMeta++
			if _, ok := live[id]; ok {
				continue
			}
			// orphan: no recording with this id exists anymore
			removeGeneratedForID(id)
			removedMeta++
		}
	}
	fmt.Printf("🧹 [gc] generated/meta checked=%d removed_orphans=%d\n", checkedMeta, removedMeta)
	stats.Checked += checkedMeta
	stats.Removed += removedMeta

	// 3) Optional: legacy layout /generated/<id>.
	genRoot, err := generatedRoot()
	if err == nil {
		genRoot = strings.TrimSpace(genRoot)
	}
	if err != nil || genRoot == "" {
		return stats
	}
	// Directory names that belong to the layout itself — never asset IDs.
	reserved := map[string]struct{}{
		"meta":   {},
		"covers": {},
		"cover":  {},
		"temp":   {},
		"tmp":    {},
		".trash": {},
	}
	removedLegacy := 0
	checkedLegacy := 0
	if entries, err := os.ReadDir(genRoot); err == nil {
		for _, e := range entries {
			if !e.IsDir() {
				continue
			}
			name := strings.TrimSpace(e.Name())
			if name == "" || strings.HasPrefix(name, ".") {
				continue
			}
			if _, ok := reserved[strings.ToLower(name)]; ok {
				continue
			}
			checkedLegacy++
			if _, ok := live[name]; ok {
				continue
			}
			removeGeneratedForID(name)
			removedLegacy++
		}
	}
	if checkedLegacy > 0 || removedLegacy > 0 {
		fmt.Printf("🧹 [gc] generated legacy checked=%d removed_orphans=%d\n", checkedLegacy, removedLegacy)
	}
	stats.Checked += checkedLegacy
	stats.Removed += removedLegacy
	return stats
}

View File

@ -1,202 +0,0 @@
package main
import (
	"context"
	"fmt"
	"math/rand"
	"net/http"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"sync"
	"time"
)
// serveTeaserFile streams a finished teaser MP4 with long-lived caching
// headers. The file is opened with share-delete semantics so it can be
// removed/replaced while being served.
func serveTeaserFile(w http.ResponseWriter, r *http.Request, path string) {
	file, err := openForReadShareDelete(path)
	if err != nil {
		http.Error(w, "datei öffnen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
		return
	}
	defer file.Close()

	info, statErr := file.Stat()
	if statErr != nil || info.IsDir() || info.Size() == 0 {
		http.Error(w, "datei nicht gefunden", http.StatusNotFound)
		return
	}

	// teaser content is immutable -> cache aggressively for one year
	w.Header().Set("Cache-Control", "public, max-age=31536000")
	w.Header().Set("Content-Type", "video/mp4")
	http.ServeContent(w, r, filepath.Base(path), info.ModTime(), file)
}
// Tolerant ffmpeg input flags for broken/truncated H264/TS streams:
// discard corrupt packets, regenerate PTS, and keep decoding despite errors.
var ffmpegInputTol = []string{
	"-fflags", "+discardcorrupt+genpts",
	"-err_detect", "ignore_err",
	"-max_error_rate", "1.0",
}
// coverModelStore is the model store used by the cover/teaser handlers.
var coverModelStore *ModelStore

// coverSeedOnce guarantees the global RNG is seeded exactly once, as the
// original comment intended ("einmalig"), even if the setter is called
// more than once.
var coverSeedOnce sync.Once

// setCoverModelStore injects the model store and seeds math/rand once.
func setCoverModelStore(s *ModelStore) {
	coverModelStore = s
	coverSeedOnce.Do(func() {
		// NOTE: rand.Seed is deprecated since Go 1.20 (the global RNG is
		// auto-seeded there); kept for compatibility with older toolchains.
		rand.Seed(time.Now().UnixNano())
	})
}
// generateTeaserMP4 cuts a short teaser clip out of srcPath, starting at
// startSec (seconds, clamped to >= 0) with length durSec (default 8s), and
// writes it as a 720p faststart MP4 to outPath. The encode goes to a temp
// file first and is renamed into place only on success, so readers never see
// a half-written file.
//
// BUGFIX: the audio map used "0:a:0" but the comment promised ffmpeg would
// NOT abort on a missing audio stream — that requires the trailing "?"
// ("0:a:0?"), as already used by the HLS preview encoder. Without it, a
// silent source fails the whole encode ("Stream map matches no streams").
func generateTeaserMP4(ctx context.Context, srcPath, outPath string, startSec, durSec float64) error {
	if durSec <= 0 {
		durSec = 8
	}
	if startSec < 0 {
		startSec = 0
	}
	// write to temp -> rename (atomic publish)
	tmp := outPath + ".tmp.mp4"
	args := []string{
		"-y",
		"-hide_banner",
		"-loglevel", "error",
	}
	args = append(args, ffmpegInputTol...)
	args = append(args,
		"-ss", fmt.Sprintf("%.3f", startSec),
		"-i", srcPath,
		"-t", fmt.Sprintf("%.3f", durSec),
		// Video
		"-vf", "scale=720:-2",
		"-map", "0:v:0",
		// Audio is optional: the trailing "?" makes the mapping non-fatal,
		// so ffmpeg does not abort when the source has no audio stream.
		"-map", "0:a:0?",
		"-c:a", "aac",
		"-b:a", "128k",
		"-ac", "2",
		"-c:v", "libx264",
		"-preset", "veryfast",
		"-crf", "28",
		"-pix_fmt", "yuv420p",
		// If audio is slightly shorter/longer than video, end cleanly.
		"-shortest",
		"-movflags", "+faststart",
		"-f", "mp4",
		tmp,
	)
	cmd := exec.CommandContext(ctx, ffmpegPath, args...)
	if out, err := cmd.CombinedOutput(); err != nil {
		_ = os.Remove(tmp)
		return fmt.Errorf("ffmpeg teaser failed: %v (%s)", err, strings.TrimSpace(string(out)))
	}
	_ = os.Remove(outPath)
	return os.Rename(tmp, outPath)
}
// generatedTeaser serves (and, unless noGenerate is set, lazily creates) the
// teaser preview MP4 for a finished recording identified by ?id=.
//
// Lookup order: new cache path generated/<assetID>/preview.mp4, then legacy
// generated/teaser/<id>_teaser.mp4 / <id>.mp4 (migrated to the new path when
// possible), then on-the-fly generation guarded by genSem.
func generatedTeaser(w http.ResponseWriter, r *http.Request) {
	id := strings.TrimSpace(r.URL.Query().Get("id"))
	if id == "" {
		http.Error(w, "id fehlt", http.StatusBadRequest)
		return
	}
	var err error
	id, err = sanitizeID(id)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	outPath, err := findFinishedFileByID(id)
	if err != nil {
		http.Error(w, "preview nicht verfügbar", http.StatusNotFound)
		return
	}
	if err := ensureGeneratedDirs(); err != nil {
		http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// asset id without the "hot" prefix; falls back to the raw id
	assetID := stripHotPrefix(id)
	if assetID == "" {
		assetID = id
	}
	assetDir, err := ensureGeneratedDir(assetID)
	if err != nil {
		http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError)
		return
	}
	previewPath := filepath.Join(assetDir, "preview.mp4")

	// noGenerate=1 -> never generate on the fly, only serve if already present
	qNoGen := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("noGenerate")))
	noGen := qNoGen == "1" || qNoGen == "true" || qNoGen == "yes"

	// Cache hit (new layout)
	if fi, err := os.Stat(previewPath); err == nil && !fi.IsDir() && fi.Size() > 0 {
		serveTeaserFile(w, r, previewPath)
		return
	}
	// Legacy layout: generated/teaser/<id>_teaser.mp4 or <id>.mp4 — migrate
	// into the new cache path when possible, otherwise serve in place.
	if teaserLegacy, _ := generatedTeaserRoot(); strings.TrimSpace(teaserLegacy) != "" {
		cids := []string{assetID, id}
		for _, cid := range cids {
			candidates := []string{
				filepath.Join(teaserLegacy, cid+"_teaser.mp4"),
				filepath.Join(teaserLegacy, cid+".mp4"),
			}
			for _, c := range candidates {
				if fi, err := os.Stat(c); err == nil && !fi.IsDir() && fi.Size() > 0 {
					if _, err2 := os.Stat(previewPath); os.IsNotExist(err2) {
						_ = os.MkdirAll(filepath.Dir(previewPath), 0o755)
						_ = os.Rename(c, previewPath)
					}
					if fi2, err2 := os.Stat(previewPath); err2 == nil && !fi2.IsDir() && fi2.Size() > 0 {
						serveTeaserFile(w, r, previewPath)
						return
					}
					// migration failed -> serve the legacy file directly
					serveTeaserFile(w, r, c)
					return
				}
			}
		}
	}
	// noGenerate active and no teaser found so far -> 404
	if noGen {
		http.Error(w, "preview nicht verfügbar", http.StatusNotFound)
		return
	}
	// Generate anew (bounded by genSem; aborts with 408 on client cancel)
	if err := genSem.Acquire(r.Context()); err != nil {
		http.Error(w, "abgebrochen: "+err.Error(), http.StatusRequestTimeout)
		return
	}
	defer genSem.Release()
	genCtx, cancel := context.WithTimeout(r.Context(), 3*time.Minute)
	defer cancel()
	if err := generateTeaserClipsMP4(genCtx, outPath, previewPath, 1.0, 18); err != nil {
		// Fallback: a single short teaser from the start (less likely to hit
		// corrupted regions of the recording)
		if err2 := generateTeaserMP4(genCtx, outPath, previewPath, 0, 8); err2 != nil {
			http.Error(w, "konnte preview nicht erzeugen: "+err.Error()+" (fallback ebenfalls fehlgeschlagen: "+err2.Error()+")", http.StatusInternalServerError)
			return
		}
	}
	serveTeaserFile(w, r, previewPath)
}

674
backend/live.go Normal file
View File

@ -0,0 +1,674 @@
// backend/live.go
package main
import (
"bufio"
"bytes"
"context"
"fmt"
"io"
"net/http"
"net/url"
"os"
"os/exec"
"path"
"path/filepath"
"regexp"
"strings"
"time"
)
// ============================================================
// HLS Live Preview serving (+ m3u8 rewrite)
// ============================================================
//
// This file contains everything related to the HLS live preview stream:
// - serving index*.m3u8 + segment files from a job's PreviewDir
// - rewriting m3u8 segment URLs to a configurable base path
// - starting/stopping the ffmpeg HLS preview process (per job)
// - hover/play activation checks + preview "touch" + ensure-start logic
//
// It intentionally reuses existing globals/types from your backend (package main):
// - jobs, jobsMu, RecordJob, JobRunning
// - ffmpegPath, previewSem
// - notifyJobsChanged()
// - assetIDForJob(job *RecordJob) string
// - startLiveThumbWebPLoop(ctx, job)
// ============================================================
// Allowed files that may be served out of PreviewDir: the index playlists
// plus .ts/.m4s segment files. Anything else is rejected by the handlers.
var previewFileRe = regexp.MustCompile(`^(index(_hq)?\.m3u8|seg_(low|hq)_\d+\.ts|seg_\d+\.ts|init\.m4s|\w+\.m4s)$`)
func serveLiveNotReady(w http.ResponseWriter, r *http.Request) {
// ✅ Für HLS-Clients (hls.js) ist 204 beim Manifest "ein Fehler" -> aggressive Retries.
// Deshalb: IMMER 200 + gültige (aber leere) m3u8 zurückgeben.
w.Header().Set("Content-Type", "application/vnd.apple.mpegurl; charset=utf-8")
w.Header().Set("Cache-Control", "no-store")
w.Header().Set("X-Content-Type-Options", "nosniff")
w.Header().Set("Retry-After", "1")
if r.Method == http.MethodHead {
w.WriteHeader(http.StatusOK)
return
}
body := "#EXTM3U\n" +
"#EXT-X-VERSION:3\n" +
"#EXT-X-TARGETDURATION:2\n" +
"#EXT-X-MEDIA-SEQUENCE:0\n"
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte(body))
}
// stopPreview stops the running ffmpeg HLS preview process for a job and resets state.
// It snapshots cmd/cancel under jobsMu, clears all preview fields, then
// cancels/kills OUTSIDE the lock so a slow process exit cannot block jobsMu.
// Callers must NOT hold jobsMu when calling this (it locks jobsMu itself).
func stopPreview(job *RecordJob) {
	jobsMu.Lock()
	cmd := job.previewCmd
	cancel := job.previewCancel
	job.previewCmd = nil
	job.previewCancel = nil
	job.LiveThumbStarted = false
	job.PreviewDir = ""
	jobsMu.Unlock()
	if cancel != nil {
		cancel()
	}
	if cmd != nil && cmd.Process != nil {
		// hard kill as a safety net; the context cancel usually suffices
		_ = cmd.Process.Kill()
	}
}
// recordPreviewLive is identical to /api/preview, except that m3u8 rewriting
// must point segment URLs at /api/preview/live instead.
func recordPreviewLive(w http.ResponseWriter, r *http.Request) {
	recordPreviewWithBase(w, r, "/api/preview/live")
}
// servePreviewHLSFileWithBase serves a single HLS file (index/segment) for a job.
// If it's an m3u8, it is rewritten so that segment URIs point at basePath.
//
// Flow: validate the file name against previewFileRe, look up the job, answer
// HEAD cheaply, require activation (hover=1 or play=1), lazily start the
// ffmpeg preview, then serve either the rewritten playlist or the raw
// segment (waiting for its size to stabilize, since ffmpeg may still be
// writing it).
func servePreviewHLSFileWithBase(w http.ResponseWriter, r *http.Request, id, file, basePath string) {
	// reject path traversal and anything outside the segment/index whitelist
	file = strings.TrimSpace(file)
	if file == "" || filepath.Base(file) != file || !previewFileRe.MatchString(file) {
		http.Error(w, "ungültige file", http.StatusBadRequest)
		return
	}
	isIndex := file == "index.m3u8" || file == "index_hq.m3u8"

	jobsMu.Lock()
	job, ok := jobs[id]
	state := ""
	if ok && job != nil {
		state = strings.TrimSpace(job.PreviewState)
	}
	jobsMu.Unlock()

	// HEAD: quick existence check (no activation, no preview start)
	if r.Method == http.MethodHead {
		if !ok || job == nil {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		if state == "private" {
			w.WriteHeader(http.StatusForbidden)
			return
		}
		if state == "offline" {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		previewDir := strings.TrimSpace(job.PreviewDir)
		if previewDir == "" {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		p := filepath.Join(previewDir, file)
		if st, err := os.Stat(p); err == nil && !st.IsDir() {
			w.Header().Set("Cache-Control", "no-store")
			w.WriteHeader(http.StatusOK)
			return
		}
		w.WriteHeader(http.StatusNotFound)
		return
	}

	// activation: hover or play=1
	active := isHover(r) || strings.TrimSpace(r.URL.Query().Get("play")) == "1"
	if !active {
		// index requests get an empty-but-valid playlist instead of an error
		// so HLS clients don't retry aggressively
		if isIndex {
			serveLiveNotReady(w, r)
			return
		}
		http.Error(w, "preview not active", http.StatusNotFound)
		return
	}
	if !ok || job == nil {
		if isIndex {
			serveLiveNotReady(w, r)
			return
		}
		http.Error(w, "job nicht gefunden", http.StatusNotFound)
		return
	}

	// lazily start ffmpeg and refresh the last-hit timestamp used by cleanup
	ensurePreviewStarted(r, job)
	touchPreview(job)

	// re-read the state: ensurePreviewStarted may have changed it
	jobsMu.Lock()
	state = strings.TrimSpace(job.PreviewState)
	jobsMu.Unlock()
	if state == "private" {
		http.Error(w, "model private", http.StatusForbidden)
		return
	}
	if state == "offline" {
		http.Error(w, "model offline", http.StatusNotFound)
		return
	}
	if state == "error" {
		http.Error(w, "preview error", http.StatusServiceUnavailable)
		return
	}

	previewDir := strings.TrimSpace(job.PreviewDir)
	if previewDir == "" {
		if isIndex {
			serveLiveNotReady(w, r)
			return
		}
		http.Error(w, "preview nicht verfügbar", http.StatusNotFound)
		return
	}
	p := filepath.Join(previewDir, file)
	st, err := os.Stat(p)
	if err != nil || st.IsDir() {
		if isIndex {
			serveLiveNotReady(w, r)
			return
		}
		http.Error(w, "datei nicht gefunden", http.StatusNotFound)
		return
	}

	ext := strings.ToLower(filepath.Ext(p))
	w.Header().Set("Cache-Control", "no-store")
	w.Header().Set("X-Accel-Buffering", "no")

	// m3u8 -> rewrite segment URIs to point at basePath
	if ext == ".m3u8" {
		raw, err := os.ReadFile(p)
		if err != nil {
			http.Error(w, "m3u8 read failed", http.StatusInternalServerError)
			return
		}
		rewritten := rewriteM3U8WithBase(raw, id, basePath)
		w.Header().Set("Content-Type", "application/vnd.apple.mpegurl; charset=utf-8")
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write(rewritten)
		return
	}
	switch ext {
	case ".ts":
		w.Header().Set("Content-Type", "video/mp2t")
	case ".m4s":
		w.Header().Set("Content-Type", "video/iso.segment")
	default:
		w.Header().Set("Content-Type", "application/octet-stream")
	}
	// segments may still be written by ffmpeg -> wait until size stabilizes
	if ext == ".ts" || ext == ".m4s" {
		if !waitForStableFile(p, 2, 120*time.Millisecond) {
			http.Error(w, "segment not ready", http.StatusNotFound)
			return
		}
	}
	f, err := os.Open(p)
	if err != nil {
		http.Error(w, "open failed", http.StatusNotFound)
		return
	}
	defer f.Close()
	http.ServeContent(w, r, file, st.ModTime(), f)
}
func waitForStableFile(path string, checks int, interval time.Duration) bool {
var last int64 = -1
for i := 0; i < checks; i++ {
st, err := os.Stat(path)
if err != nil || st.IsDir() {
return false
}
sz := st.Size()
if last >= 0 && sz == last {
return true
}
last = sz
time.Sleep(interval)
}
return false
}
// classifyPreviewFFmpegStderr maps well-known ffmpeg HTTP failure messages to
// a preview state: ("private", 403) for 403 responses, ("offline", 404) for
// 404 responses, and ("", 0) when the output matches neither pattern.
func classifyPreviewFFmpegStderr(stderr string) (state string, httpStatus int) {
	lower := strings.ToLower(stderr)
	has := func(needle string) bool { return strings.Contains(lower, needle) }
	switch {
	case has("403 forbidden") || has("http error 403") || has("server returned 403"):
		return "private", http.StatusForbidden
	case has("404 not found") || has("http error 404") || has("server returned 404"):
		return "offline", http.StatusNotFound
	default:
		return "", 0
	}
}
// startPreviewHLS starts ffmpeg to generate HLS segments in previewDir.
// It also starts your existing live-thumb loop: startLiveThumbWebPLoop(ctx, job).
//
// The encode runs asynchronously: a goroutine acquires previewSem, runs
// ffmpeg and, on failure, classifies stderr into PreviewState
// ("private"/"offline"/"error") so the HTTP handlers can answer with the
// matching status.
func startPreviewHLS(ctx context.Context, job *RecordJob, m3u8URL, previewDir, httpCookie, userAgent string) error {
	if strings.TrimSpace(ffmpegPath) == "" {
		return fmt.Errorf("kein ffmpeg gefunden setze FFMPEG_PATH oder lege ffmpeg(.exe) neben das Backend")
	}
	if err := os.MkdirAll(previewDir, 0o755); err != nil {
		return err
	}
	// reset any stale state from a previous preview run
	jobsMu.Lock()
	job.PreviewState = ""
	job.PreviewStateAt = ""
	job.PreviewStateMsg = ""
	jobsMu.Unlock()
	notifyJobsChanged()

	// input options: forward UA/cookie to the upstream HLS server
	commonIn := []string{"-y"}
	if strings.TrimSpace(userAgent) != "" {
		commonIn = append(commonIn, "-user_agent", userAgent)
	}
	if strings.TrimSpace(httpCookie) != "" {
		commonIn = append(commonIn, "-headers", fmt.Sprintf("Cookie: %s\r\n", httpCookie))
	}
	commonIn = append(commonIn, "-i", m3u8URL)

	// 480p low-latency H.264 + optional AAC audio ("0:a:0?" tolerates silent
	// sources), rolling HLS playlist of 6 x 2s segments with self-cleanup
	hqArgs := append(commonIn,
		"-vf", "scale=480:-2",
		"-c:v", "libx264", "-preset", "veryfast", "-tune", "zerolatency",
		"-pix_fmt", "yuv420p",
		"-profile:v", "main",
		"-level", "3.1",
		"-threads", "4",
		"-g", "48", "-keyint_min", "48", "-sc_threshold", "0",
		"-map", "0:v:0",
		"-map", "0:a:0?",
		"-c:a", "aac", "-b:a", "128k", "-ac", "2",
		"-f", "hls",
		"-hls_time", "2",
		"-hls_list_size", "6",
		"-hls_allow_cache", "0",
		"-hls_flags", "delete_segments+append_list+independent_segments+temp_file",
		"-hls_segment_filename", filepath.Join(previewDir, "seg_hq_%05d.ts"),
		filepath.Join(previewDir, "index_hq.m3u8"),
	)
	cmd := exec.CommandContext(ctx, ffmpegPath, hqArgs...)
	var stderr bytes.Buffer
	cmd.Stderr = &stderr
	jobsMu.Lock()
	job.previewCmd = cmd
	jobsMu.Unlock()

	go func() {
		// bound the number of concurrent preview encodes
		if err := previewSem.Acquire(ctx); err != nil {
			jobsMu.Lock()
			if job.previewCmd == cmd {
				job.previewCmd = nil
			}
			jobsMu.Unlock()
			return
		}
		defer previewSem.Release()
		// only report failures that aren't caused by our own cancellation
		if err := cmd.Run(); err != nil && ctx.Err() == nil {
			st := strings.TrimSpace(stderr.String())
			state, code := classifyPreviewFFmpegStderr(st)
			jobsMu.Lock()
			if state != "" {
				job.PreviewState = state
				job.PreviewStateAt = time.Now().UTC().Format(time.RFC3339Nano)
				job.PreviewStateMsg = fmt.Sprintf("ffmpeg input returned HTTP %d", code)
			} else {
				job.PreviewState = "error"
				job.PreviewStateAt = time.Now().UTC().Format(time.RFC3339Nano)
				// keep the state message short for the UI
				if len(st) > 280 {
					job.PreviewStateMsg = st[:280] + "…"
				} else {
					job.PreviewStateMsg = st
				}
			}
			jobsMu.Unlock()
			notifyJobsChanged()
			fmt.Printf("⚠️ preview hq ffmpeg failed: %v (%s)\n", err, st)
		}
		// clear our cmd reference unless a newer preview replaced it already
		jobsMu.Lock()
		if job.previewCmd == cmd {
			job.previewCmd = nil
		}
		jobsMu.Unlock()
	}()

	startLiveThumbWebPLoop(ctx, job)
	return nil
}
// rewriteM3U8WithBase rewrites all segment URIs inside an m3u8 so they point
// at basePath, e.g.:
//
//	/api/preview/live?id=<id>&file=seg_hq_00001.ts&play=1
//
// Absolute http(s) URLs and URIs that already target our endpoint are left
// untouched. On a scanner error the raw input is returned unchanged.
func rewriteM3U8WithBase(raw []byte, id string, basePath string) []byte {
	basePath = strings.TrimSpace(basePath)
	if basePath == "" {
		basePath = "/api/preview"
	}
	if !strings.HasPrefix(basePath, "/") {
		basePath = "/" + basePath
	}
	prefix := basePath + "?id=" + url.QueryEscape(id) + "&file="

	var buf bytes.Buffer
	scanner := bufio.NewScanner(bytes.NewReader(raw))
	for scanner.Scan() {
		line := scanner.Text()
		trimmed := strings.TrimSpace(line)
		switch {
		case trimmed == "":
			// keep blank lines (emitted below)
		case strings.HasPrefix(trimmed, "#"):
			// tag line: may carry a URI="..." attribute (e.g. #EXT-X-MAP)
			buf.WriteString(rewriteAttrURIWithBase(line, prefix, basePath))
		case strings.HasPrefix(trimmed, "http://"), strings.HasPrefix(trimmed, "https://"):
			// absolute URL: keep as-is
			buf.WriteString(line)
		case strings.Contains(trimmed, basePath), strings.Contains(trimmed, "/api/preview"):
			// already points at our endpoint: keep
			buf.WriteString(line)
		default:
			buf.WriteString(prefix + url.QueryEscape(path.Base(trimmed)) + "&play=1")
		}
		buf.WriteByte('\n')
	}
	if scanner.Err() != nil {
		return raw
	}
	return buf.Bytes()
}

// rewriteAttrURIWithBase rewrites the URI="..." attribute of a tag line to
// point at base, leaving absolute or already-rewritten URIs untouched.
func rewriteAttrURIWithBase(line, base string, basePath string) string {
	const marker = `URI="`
	open := strings.Index(line, marker)
	if open < 0 {
		return line
	}
	start := open + len(marker)
	rel := strings.Index(line[start:], `"`)
	if rel < 0 {
		return line
	}
	end := start + rel
	uri := strings.TrimSpace(line[start:end])
	if strings.HasPrefix(uri, "http://") || strings.HasPrefix(uri, "https://") {
		return line
	}
	if strings.Contains(uri, basePath) || strings.Contains(uri, "/api/preview") {
		return line
	}
	return line[:start] + base + url.QueryEscape(path.Base(uri)) + "&play=1" + line[end:]
}
// isHover decides whether this request should count as "active".
func isHover(r *http.Request) bool {
v := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("hover")))
return v == "1" || v == "true" || v == "yes"
}
// touchPreview updates the last-hit timestamp so your cleanup/stop logic can use it.
// Safe to call with a nil job.
func touchPreview(job *RecordJob) {
	if job == nil {
		return
	}
	jobsMu.Lock()
	job.previewLastHit = time.Now()
	jobsMu.Unlock()
}
// ensurePreviewStarted starts the ffmpeg HLS preview if not running yet.
// previewStartMu serializes concurrent starts per job; jobsMu guards the
// shared job fields and is always released before calling startPreviewHLS
// (which locks jobsMu itself). The startPreviewHLS error is deliberately
// ignored (best effort) — failures surface via PreviewState instead.
func ensurePreviewStarted(r *http.Request, job *RecordJob) {
	if job == nil {
		return
	}
	job.previewStartMu.Lock()
	defer job.previewStartMu.Unlock()

	jobsMu.Lock()
	if job.previewCmd != nil && job.PreviewDir != "" {
		// already running: just refresh the activity timestamp
		job.previewLastHit = time.Now()
		jobsMu.Unlock()
		return
	}
	m3u8 := strings.TrimSpace(job.PreviewM3U8)
	cookie := strings.TrimSpace(job.PreviewCookie)
	ua := strings.TrimSpace(job.PreviewUA)
	jobsMu.Unlock()
	if m3u8 == "" {
		return
	}

	// preview output lives in the OS temp dir, keyed by the job's asset id
	pctx, cancel := context.WithCancel(context.Background())
	assetID := assetIDForJob(job)
	pdir := filepath.Join(os.TempDir(), "rec_preview", assetID)

	jobsMu.Lock()
	job.PreviewDir = pdir
	job.previewCancel = cancel
	job.previewLastHit = time.Now()
	jobsMu.Unlock()

	_ = startPreviewHLS(pctx, job, m3u8, pdir, cookie, ua)
}
// ============================================================
// Live fMP4 (single request, chunked) via ffmpeg -> stdout
// Route: /api/preview/live-fmp4?id=<jobId>&hover=1
// ============================================================
// recordPreviewLiveFMP4 streams a live preview as a single chunked HTTP
// response: ffmpeg re-encodes the job's HLS source into fragmented MP4
// (video only, MSE-friendly) and writes it to stdout, which we pipe to the
// client with per-chunk flushes. Route: /api/preview/live-fmp4?id=<jobId>&hover=1
func recordPreviewLiveFMP4(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodGet {
		http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed)
		return
	}
	id := strings.TrimSpace(r.URL.Query().Get("id"))
	if id == "" {
		http.Error(w, "id fehlt", http.StatusBadRequest)
		return
	}
	// activation: hover or play=1 (same rule as the HLS endpoint)
	active := isHover(r) || strings.TrimSpace(r.URL.Query().Get("play")) == "1"
	if !active {
		http.Error(w, "preview not active", http.StatusNotFound)
		return
	}
	jobsMu.Lock()
	job, ok := jobs[id]
	state := ""
	if ok && job != nil {
		state = strings.TrimSpace(job.PreviewState)
	}
	jobsMu.Unlock()
	if !ok || job == nil {
		http.Error(w, "job nicht gefunden", http.StatusNotFound)
		return
	}
	// ensure the ffmpeg input data exists
	// (PreviewM3U8 + cookie/UA are set on the job elsewhere)
	m3u8 := strings.TrimSpace(job.PreviewM3U8)
	if m3u8 == "" {
		http.Error(w, "preview m3u8 fehlt", http.StatusNotFound)
		return
	}
	// map known preview states to HTTP statuses
	if state == "private" {
		http.Error(w, "model private", http.StatusForbidden)
		return
	}
	if state == "offline" {
		http.Error(w, "model offline", http.StatusNotFound)
		return
	}
	if state == "error" {
		http.Error(w, "preview error", http.StatusServiceUnavailable)
		return
	}
	// response headers: fMP4 stream, never cached or proxy-buffered
	w.Header().Set("Content-Type", `video/mp4`)
	w.Header().Set("Cache-Control", "no-store")
	w.Header().Set("X-Accel-Buffering", "no")
	// streaming requires a flushable writer
	flusher, okf := w.(http.Flusher)
	if !okf {
		http.Error(w, "Streaming nicht unterstützt", http.StatusInternalServerError)
		return
	}
	// client disconnect => context cancels => ffmpeg stops
	ctx := r.Context()
	// cookie/UA from the job
	cookie := strings.TrimSpace(job.PreviewCookie)
	ua := strings.TrimSpace(job.PreviewUA)
	if ua == "" {
		ua = "Mozilla/5.0"
	}
	// ffmpeg args: input = m3u8, output = fragmented mp4 on stdout.
	// Video only (no audio) so MSE codec strings stay stable.
	args := []string{"-hide_banner", "-loglevel", "error"}
	if ua != "" {
		args = append(args, "-user_agent", ua)
	}
	if cookie != "" {
		args = append(args, "-headers", fmt.Sprintf("Cookie: %s\r\n", cookie))
	}
	// input
	args = append(args, "-i", m3u8)
	// video encode (low-latency-ish)
	args = append(args,
		"-an",
		"-vf", "scale=480:-2",
		"-c:v", "libx264",
		"-preset", "veryfast",
		"-tune", "zerolatency",
		"-pix_fmt", "yuv420p",
		"-profile:v", "main",
		"-level", "3.1",
		"-g", "48",
		"-keyint_min", "48",
		"-sc_threshold", "0",
	)
	// output: fMP4 fragments to stdout (single HTTP response)
	args = append(args,
		"-f", "mp4",
		"-movflags", "frag_keyframe+empty_moov+default_base_moof",
		"-frag_duration", "2000000", // 2s (microseconds)
		"-min_frag_duration", "2000000",
		"pipe:1",
	)
	cmd := exec.CommandContext(ctx, ffmpegPath, args...)
	// stdout -> response body
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		http.Error(w, "ffmpeg stdout pipe failed", http.StatusInternalServerError)
		return
	}
	// stderr kept only for debugging (optional)
	var stderr bytes.Buffer
	cmd.Stderr = &stderr
	// start the encoder
	if err := cmd.Start(); err != nil {
		http.Error(w, "ffmpeg start failed: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// When the client goes away, CommandContext kills the process anyway
	// (ctx cancels); we still check ctx each iteration while streaming.
	buf := make([]byte, 32*1024)
	for {
		select {
		case <-ctx.Done():
			_ = cmd.Process.Kill()
			return
		default:
		}
		n, rerr := stdout.Read(buf)
		if n > 0 {
			_, _ = w.Write(buf[:n])
			// flush every chunk so the player receives data immediately
			flusher.Flush()
		}
		if rerr != nil {
			if rerr == io.EOF {
				break
			}
			break
		}
	}
	// wait for the process (prevents zombies)
	_ = cmd.Wait()
}

View File

@ -109,6 +109,35 @@ type ffprobeInfo struct {
Streams []ffprobeStreamInfo `json:"streams"` Streams []ffprobeStreamInfo `json:"streams"`
} }
// jobMatchesModelKey reports whether a record job belongs to the given model
// key. It first derives the model name from the job's output filename
// (pattern: <model>_MM_DD_YYYY__HH-MM-SS...), then falls back to looking for
// a "/<model>" path segment in the job's source URL.
//
// The key comparison is case-insensitive; an empty key or nil job never matches.
func jobMatchesModelKey(j *RecordJob, modelKey string) bool {
	if j == nil {
		return false
	}
	mk := strings.ToLower(strings.TrimSpace(modelKey))
	if mk == "" {
		return false
	}
	// 1) Output name (pattern: <model>_MM_DD_YYYY__HH-MM-SS...).
	out := strings.TrimSpace(j.Output)
	if out != "" {
		stem := strings.TrimSuffix(filepath.Base(out), filepath.Ext(out))
		// modelNameFromFilename is the same helper used by modelKeyFromFilenameOrPath.
		guess := strings.ToLower(strings.TrimSpace(modelNameFromFilename(stripHotPrefix(stem))))
		if guess == mk {
			return true
		}
	}
	// 2) Fallback: SourceURL contains a "/<model>" path segment.
	// BUGFIX: a plain strings.Contains also matched key prefixes
	// (key "anna" matched ".../annabelle"). Require the match to end at a
	// segment boundary (end of string or a non-key character).
	src := strings.ToLower(strings.TrimSpace(j.SourceURL))
	needle := "/" + mk
	for start := 0; start < len(src); {
		i := strings.Index(src[start:], needle)
		if i < 0 {
			break
		}
		end := start + i + len(needle)
		if end == len(src) || !isModelKeyChar(src[end]) {
			return true
		}
		start = start + i + 1
	}
	return false
}

// isModelKeyChar reports whether c may appear inside a model key
// (lowercase letter, digit, '_' or '-'); used to detect segment boundaries.
func isModelKeyChar(c byte) bool {
	return (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '_' || c == '-'
}
func parseFFRate(s string) float64 { func parseFFRate(s string) float64 {
s = strings.TrimSpace(s) s = strings.TrimSpace(s)
if s == "" || s == "0/0" { if s == "" || s == "0/0" {

View File

@ -158,8 +158,8 @@ func (s *ModelStore) init() error {
return err return err
} }
db.SetMaxOpenConns(5) db.SetMaxOpenConns(10)
db.SetMaxIdleConns(5) db.SetMaxIdleConns(10)
if err := db.Ping(); err != nil { if err := db.Ping(); err != nil {
_ = db.Close() _ = db.Close()

View File

@ -1,8 +1,10 @@
// backend/postwork_queue.go // backend/postwork.go
package main package main
import ( import (
"context" "context"
"reflect"
"strings"
"sync" "sync"
"time" "time"
) )
@ -199,4 +201,39 @@ func (pq *PostWorkQueue) StatusForKey(key string) PostWorkKeyStatus {
} }
// global (oder in deinem app struct halten) // global (oder in deinem app struct halten)
var postWorkQ = NewPostWorkQueue(512, 4) // maxParallelFFmpeg = 2 var postWorkQ = NewPostWorkQueue(512, 4) // maxParallelFFmpeg = 4
// --- Status Refresher (ehemals postwork_refresh.go) ---
// startPostWorkStatusRefresher launches a background goroutine that, once per
// second, copies the current post-work queue status into every job that has a
// PostWorkKey, and broadcasts a jobs-changed notification when anything differed.
func startPostWorkStatusRefresher() {
	ticker := time.NewTicker(time.Second)

	// refreshOnce syncs all job statuses under the jobs lock and reports
	// whether any job's PostWork snapshot changed.
	refreshOnce := func() bool {
		dirty := false
		jobsMu.Lock()
		defer jobsMu.Unlock()
		for _, j := range jobs {
			k := strings.TrimSpace(j.PostWorkKey)
			if k == "" {
				continue
			}
			cur := postWorkQ.StatusForKey(k)
			// No type name needed: j.PostWork is *<StatusType>, cur is <StatusType>.
			if j.PostWork != nil && reflect.DeepEqual(*j.PostWork, cur) {
				continue
			}
			snapshot := cur
			j.PostWork = &snapshot
			dirty = true
		}
		return dirty
	}

	go func() {
		defer ticker.Stop()
		for range ticker.C {
			// Notify outside the lock (refreshOnce has already unlocked).
			if refreshOnce() {
				notifyJobsChanged()
			}
		}
	}()
}

View File

@ -1,42 +0,0 @@
// backend\postwork_refresh.go
package main
import (
"reflect"
"strings"
"time"
)
// startPostWorkStatusRefresher starts a goroutine that polls the post-work
// queue once per second and mirrors the per-key status into each job's
// PostWork field, emitting a jobs-changed notification on any difference.
func startPostWorkStatusRefresher() {
	t := time.NewTicker(1 * time.Second)
	go func() {
		defer t.Stop()
		for range t.C {
			changed := false
			jobsMu.Lock()
			for _, job := range jobs {
				key := strings.TrimSpace(job.PostWorkKey)
				if key == "" {
					// Job has no post-work attached; nothing to sync.
					continue
				}
				st := postWorkQ.StatusForKey(key)
				// No type name needed: job.PostWork is *<StatusType>, st is <StatusType>.
				if job.PostWork == nil || !reflect.DeepEqual(*job.PostWork, st) {
					// Store a copy so the stored pointer is independent of st.
					tmp := st
					job.PostWork = &tmp
					changed = true
				}
			}
			jobsMu.Unlock()
			// Notify listeners outside the lock.
			if changed {
				notifyJobsChanged()
			}
		}
	}()
}

2286
backend/preview.go Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,393 +0,0 @@
// backend\preview_hls.go
package main
import (
"bytes"
"context"
"fmt"
"net/http"
"os"
"os/exec"
"path/filepath"
"regexp"
"strings"
"time"
)
var previewFileRe = regexp.MustCompile(`^(index(_hq)?\.m3u8|seg_(low|hq)_\d+\.ts|seg_\d+\.ts)$`)
func serveEmptyLiveM3U8(w http.ResponseWriter, r *http.Request) {
// Für Player: gültige Playlist statt 204 liefern
w.Header().Set("Content-Type", "application/vnd.apple.mpegurl; charset=utf-8")
w.Header().Set("Cache-Control", "no-store")
w.Header().Set("X-Content-Type-Options", "nosniff")
// Optional: Player/Proxy darf schnell retryen
w.Header().Set("Retry-After", "1")
// Bei HEAD nur Header schicken
if r.Method == http.MethodHead {
w.WriteHeader(http.StatusOK)
return
}
// Minimal gültige LIVE-Playlist (keine Segmente, kein ENDLIST)
// Viele Player bleiben damit im "loading", statt hart zu failen.
body := "#EXTM3U\n" +
"#EXT-X-VERSION:3\n" +
"#EXT-X-TARGETDURATION:2\n" +
"#EXT-X-MEDIA-SEQUENCE:0\n"
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte(body))
}
// stopPreview tears down a job's live preview pipeline: it detaches the
// running ffmpeg command and its cancel func from the job under the jobs
// lock and resets the preview fields, then — outside the lock — cancels the
// context and kills the process.
func stopPreview(job *RecordJob) {
	jobsMu.Lock()
	cmd := job.previewCmd
	cancel := job.previewCancel
	job.previewCmd = nil
	job.previewCancel = nil
	job.LiveThumbStarted = false
	job.PreviewDir = ""
	jobsMu.Unlock()
	if cancel != nil {
		cancel()
	}
	if cmd != nil && cmd.Process != nil {
		// cancel() alone may not reap an already-started ffmpeg; kill explicitly.
		_ = cmd.Process.Kill()
	}
}
// servePreviewHLSFile serves one file (playlist or segment) of a job's live
// preview HLS stream.
//
// Behavior:
//   - file is validated against previewFileRe (bare name only, no traversal);
//   - HEAD is a pure existence check and never starts the preview;
//   - GET only reaches the live HLS when the client hovers or sent play=1,
//     otherwise an empty live playlist is returned for index requests;
//   - PreviewState "private"/"offline"/"error" map to 403/404/503;
//   - playlists are URI-rewritten through the API, segments are streamed with
//     Range support once they stopped growing.
func servePreviewHLSFile(w http.ResponseWriter, r *http.Request, id, file string) {
	file = strings.TrimSpace(file)
	// Reject anything that is not a bare, whitelisted preview file name.
	if file == "" || filepath.Base(file) != file || !previewFileRe.MatchString(file) {
		http.Error(w, "ungültige file", http.StatusBadRequest)
		return
	}
	isIndex := file == "index.m3u8" || file == "index_hq.m3u8"
	// Snapshot job + preview state under the lock.
	jobsMu.Lock()
	job, ok := jobs[id]
	state := ""
	if ok && job != nil {
		state = strings.TrimSpace(job.PreviewState)
	}
	jobsMu.Unlock()
	// =========================
	// HEAD = existence check only (no hover needed, never starts the preview)
	// =========================
	if r.Method == http.MethodHead {
		if !ok || job == nil {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		if state == "private" {
			w.WriteHeader(http.StatusForbidden)
			return
		}
		if state == "offline" {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		previewDir := strings.TrimSpace(job.PreviewDir)
		if previewDir == "" {
			w.WriteHeader(http.StatusNotFound)
			return
		}
		p := filepath.Join(previewDir, file)
		if st, err := os.Stat(p); err == nil && !st.IsDir() {
			w.Header().Set("Cache-Control", "no-store")
			w.WriteHeader(http.StatusOK)
			return
		}
		w.WriteHeader(http.StatusNotFound)
		return
	}
	// =========================
	// The player may start the preview without hover:
	// the frontend appends &play=1 (recommended); play=1 counts as active.
	// =========================
	active := isHover(r) || strings.TrimSpace(r.URL.Query().Get("play")) == "1"
	if !active {
		// No hover/play => never touch the live HLS.
		if isIndex {
			serveEmptyLiveM3U8(w, r)
			return
		}
		http.Error(w, "preview not active", http.StatusNotFound)
		return
	}
	// active => unknown job: fail gracefully for index requests.
	if !ok || job == nil {
		if isIndex {
			serveEmptyLiveM3U8(w, r)
			return
		}
		http.Error(w, "job nicht gefunden", http.StatusNotFound)
		return
	}
	// active => start preview (idempotent) and keep it alive.
	ensurePreviewStarted(r, job)
	touchPreview(job)
	// Re-read the state: starting the preview may have changed it.
	jobsMu.Lock()
	state = strings.TrimSpace(job.PreviewState)
	jobsMu.Unlock()
	if state == "private" {
		http.Error(w, "model private", http.StatusForbidden)
		return
	}
	if state == "offline" {
		http.Error(w, "model offline", http.StatusNotFound)
		return
	}
	if state == "error" {
		http.Error(w, "preview error", http.StatusServiceUnavailable)
		return
	}
	previewDir := strings.TrimSpace(job.PreviewDir)
	if previewDir == "" {
		if isIndex {
			serveEmptyLiveM3U8(w, r)
			return
		}
		http.Error(w, "preview nicht verfügbar", http.StatusNotFound)
		return
	}
	p := filepath.Join(previewDir, file)
	st, err := os.Stat(p)
	if err != nil || st.IsDir() {
		if isIndex {
			serveEmptyLiveM3U8(w, r)
			return
		}
		http.Error(w, "datei nicht gefunden", http.StatusNotFound)
		return
	}
	ext := strings.ToLower(filepath.Ext(p))
	// common: live data must never be cached
	w.Header().Set("Cache-Control", "no-store")
	// avoids some proxy buffering surprises (harmless if ignored)
	w.Header().Set("X-Accel-Buffering", "no")
	// =========================
	// .m3u8: rewrite URIs (playlists are small, ReadFile is fine)
	// =========================
	if ext == ".m3u8" {
		raw, err := os.ReadFile(p)
		if err != nil {
			http.Error(w, "m3u8 read failed", http.StatusInternalServerError)
			return
		}
		rewritten := rewriteM3U8(raw, id)
		w.Header().Set("Content-Type", "application/vnd.apple.mpegurl; charset=utf-8")
		w.WriteHeader(http.StatusOK)
		_, _ = w.Write(rewritten)
		return
	}
	// =========================
	// Segments: robust streaming + Range support
	// =========================
	switch ext {
	case ".ts":
		w.Header().Set("Content-Type", "video/mp2t")
	case ".m4s":
		w.Header().Set("Content-Type", "video/iso.segment")
	default:
		w.Header().Set("Content-Type", "application/octet-stream")
	}
	// Only hand out a segment once it stopped growing; a half-written .ts
	// gets a 404 and the player simply retries.
	if ext == ".ts" || ext == ".m4s" {
		if !waitForStableFile(p, 2, 120*time.Millisecond) {
			http.Error(w, "segment not ready", http.StatusNotFound)
			return
		}
	}
	f, err := os.Open(p)
	if err != nil {
		http.Error(w, "open failed", http.StatusNotFound)
		return
	}
	defer f.Close()
	// ServeContent handles Range correctly and streams without ReadAll.
	// name is only for logs/caching; modTime drives If-Modified-Since etc.
	http.ServeContent(w, r, file, st.ModTime(), f)
}
func waitForStableFile(path string, checks int, interval time.Duration) bool {
// returns true if size is stable across N checks
var last int64 = -1
for i := 0; i < checks; i++ {
st, err := os.Stat(path)
if err != nil || st.IsDir() {
return false
}
sz := st.Size()
if last >= 0 && sz == last {
return true
}
last = sz
time.Sleep(interval)
}
// if we never saw stability, assume not ready
return false
}
// classifyPreviewFFmpegStderr maps well-known ffmpeg HTTP error lines to a
// preview state and the matching HTTP status code. ffmpeg typically prints
// "HTTP error 403 Forbidden" or "Server returned 404 Not Found". Unknown
// output yields ("", 0).
func classifyPreviewFFmpegStderr(stderr string) (state string, httpStatus int) {
	lower := strings.ToLower(stderr)
	anyOf := func(subs ...string) bool {
		for _, sub := range subs {
			if strings.Contains(lower, sub) {
				return true
			}
		}
		return false
	}
	switch {
	case anyOf("403 forbidden", "http error 403", "server returned 403"):
		return "private", http.StatusForbidden
	case anyOf("404 not found", "http error 404", "server returned 404"):
		return "offline", http.StatusNotFound
	default:
		return "", 0
	}
}
// startPreviewHLS spawns an ffmpeg process that transcodes the live source
// playlist (m3u8URL) into a small local HLS stream (480p H.264 + AAC) under
// previewDir, using the given cookie/user agent for the upstream requests.
// The process runs asynchronously behind previewSem; its 403/404 failures are
// reflected into the job's PreviewState. Also starts the live-thumb writer.
func startPreviewHLS(ctx context.Context, job *RecordJob, m3u8URL, previewDir, httpCookie, userAgent string) error {
	if strings.TrimSpace(ffmpegPath) == "" {
		return fmt.Errorf("kein ffmpeg gefunden setze FFMPEG_PATH oder lege ffmpeg(.exe) neben das Backend")
	}
	if err := os.MkdirAll(previewDir, 0755); err != nil {
		return err
	}
	// Reset preview state for the fresh start.
	jobsMu.Lock()
	job.PreviewState = ""
	job.PreviewStateAt = ""
	job.PreviewStateMsg = ""
	jobsMu.Unlock()
	notifyJobsChanged()
	// Common input args: overwrite, optional UA/cookie, then the source URL.
	commonIn := []string{"-y"}
	if strings.TrimSpace(userAgent) != "" {
		commonIn = append(commonIn, "-user_agent", userAgent)
	}
	if strings.TrimSpace(httpCookie) != "" {
		commonIn = append(commonIn, "-headers", fmt.Sprintf("Cookie: %s\r\n", httpCookie))
	}
	commonIn = append(commonIn, "-i", m3u8URL)
	hqArgs := append(commonIn,
		"-vf", "scale=480:-2",
		"-c:v", "libx264", "-preset", "veryfast", "-tune", "zerolatency",
		"-pix_fmt", "yuv420p",
		"-profile:v", "main",
		"-level", "3.1",
		"-threads", "4",
		// GOP ~ 2s (at 24fps). Optionally add force_key_frames on top.
		"-g", "48", "-keyint_min", "48", "-sc_threshold", "0",
		// optional, if segments still come out too large:
		// "-force_key_frames", "expr:gte(t,n_forced*2)",
		"-map", "0:v:0",
		"-map", "0:a:0?",
		"-c:a", "aac", "-b:a", "128k", "-ac", "2",
		"-f", "hls",
		"-hls_time", "2",
		"-hls_list_size", "6",
		"-hls_allow_cache", "0",
		// important: temp_file keeps half-written segments out of the playlist
		"-hls_flags", "delete_segments+append_list+independent_segments+temp_file",
		"-hls_segment_filename", filepath.Join(previewDir, "seg_hq_%05d.ts"),
		// hls_base_url deliberately omitted (playlists are rewritten anyway)
		// "-hls_base_url", baseURL,
		filepath.Join(previewDir, "index_hq.m3u8"),
	)
	cmd := exec.CommandContext(ctx, ffmpegPath, hqArgs...)
	var stderr bytes.Buffer
	cmd.Stderr = &stderr
	jobsMu.Lock()
	job.previewCmd = cmd
	jobsMu.Unlock()
	go func() {
		// Bound the number of concurrent previews.
		if err := previewSem.Acquire(ctx); err != nil {
			jobsMu.Lock()
			if job.previewCmd == cmd {
				job.previewCmd = nil
			}
			jobsMu.Unlock()
			return
		}
		defer previewSem.Release()
		if err := cmd.Run(); err != nil && ctx.Err() == nil {
			st := strings.TrimSpace(stderr.String())
			// Detect 403/404 in ffmpeg stderr -> mark the job private/offline.
			state, code := classifyPreviewFFmpegStderr(st)
			jobsMu.Lock()
			if state != "" {
				job.PreviewState = state
				job.PreviewStateAt = time.Now().UTC().Format(time.RFC3339Nano)
				job.PreviewStateMsg = fmt.Sprintf("ffmpeg input returned HTTP %d", code)
			} else {
				job.PreviewState = "error"
				job.PreviewStateAt = time.Now().UTC().Format(time.RFC3339Nano)
				// Truncate very long stderr for the status message.
				if len(st) > 280 {
					job.PreviewStateMsg = st[:280] + "…"
				} else {
					job.PreviewStateMsg = st
				}
			}
			jobsMu.Unlock()
			notifyJobsChanged()
			fmt.Printf("⚠️ preview hq ffmpeg failed: %v (%s)\n", err, st)
		}
		// Detach the command only if no newer one replaced it meanwhile.
		jobsMu.Lock()
		if job.previewCmd == cmd {
			job.previewCmd = nil
		}
		jobsMu.Unlock()
	}()
	// Start the live thumb writer (periodically rewrites
	// generated/<assetID>/preview.webp).
	startLiveThumbWebPLoop(ctx, job)
	return nil
}

View File

@ -1,100 +0,0 @@
package main
import (
"bufio"
"bytes"
"net/url"
"path"
"strings"
)
// rewriteM3U8 rewrites every relative URI in an HLS playlist so it is fetched
// through /api/preview again. play=1 is kept on the rewritten URIs so that
// follow-up requests (segments, child playlists) also work without a hover.
// On scanner failure the input is returned unchanged.
func rewriteM3U8(raw []byte, id string) []byte {
	apiBase := "/api/preview?id=" + url.QueryEscape(id) + "&file="

	isPassThrough := func(uri string) bool {
		// Absolute URLs and URIs already routed through our API stay as-is.
		return strings.HasPrefix(uri, "http://") ||
			strings.HasPrefix(uri, "https://") ||
			strings.Contains(uri, "/api/preview")
	}

	var b bytes.Buffer
	sc := bufio.NewScanner(bytes.NewReader(raw))
	// Default scanner token limit is 64K — plenty for normal playlists.
	for sc.Scan() {
		line := sc.Text()
		trimmed := strings.TrimSpace(line)

		switch {
		case trimmed == "":
			// keep blank lines
		case strings.HasPrefix(trimmed, "#"):
			// Tag/comment line: rewrite an embedded URI="..." if present
			// (EXT-X-KEY, EXT-X-MAP, ...).
			b.WriteString(rewriteAttrURI(line, apiBase))
		case isPassThrough(trimmed):
			b.WriteString(line)
		default:
			// Relative segment / child playlist: route via the API.
			// ffmpeg normally writes no subdirectories, so basename suffices.
			b.WriteString(apiBase + url.QueryEscape(path.Base(trimmed)) + "&play=1")
		}
		b.WriteByte('\n')
	}
	if sc.Err() != nil {
		// Better raw than a broken playlist.
		return raw
	}
	return b.Bytes()
}
// rewriteAttrURI rewrites the URI="..." attribute of playlist tags such as
// EXT-X-KEY / EXT-X-MAP / EXT-X-MEDIA / EXT-X-I-FRAME-STREAM-INF so that a
// relative URI is served through our preview API. Absolute URIs and URIs
// already pointing at /api/preview are returned untouched.
func rewriteAttrURI(line, base string) string {
	const marker = `URI="`

	open := strings.Index(line, marker)
	if open < 0 {
		return line
	}
	valStart := open + len(marker)

	closing := strings.Index(line[valStart:], `"`)
	if closing < 0 {
		return line
	}
	valEnd := valStart + closing

	uri := strings.TrimSpace(line[valStart:valEnd])
	if strings.HasPrefix(uri, "http://") ||
		strings.HasPrefix(uri, "https://") ||
		strings.Contains(uri, "/api/preview") {
		// absolute or already ours — nothing to do
		return line
	}

	rewritten := base + url.QueryEscape(path.Base(uri)) + "&play=1"
	return line[:valStart] + rewritten + line[valEnd:]
}

View File

@ -1,86 +0,0 @@
package main
import (
"html"
"net/http"
"strings"
)
// servePreviewStatusSVG writes a 16:9 placeholder SVG ("preview unavailable")
// with the given label as title. status <= 0 defaults to 200. The label is
// HTML-escaped before being embedded in the markup.
func servePreviewStatusSVG(w http.ResponseWriter, label string, status int) {
	w.Header().Set("Content-Type", "image/svg+xml; charset=utf-8")
	w.Header().Set("Cache-Control", "no-store")
	w.Header().Set("X-Content-Type-Options", "nosniff")
	if status <= 0 {
		status = http.StatusOK
	}
	// Escape user-provided label before embedding it into the SVG text node.
	title := html.EscapeString(strings.TrimSpace(label))
	if title == "" {
		title = "Preview"
	}
	// 16:9 (matches the frontend cards)
	svg := `<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 320 180" preserveAspectRatio="xMidYMid slice">
<defs>
<!-- Subtle gradient background -->
<linearGradient id="bg" x1="0" y1="0" x2="1" y2="1">
<stop offset="0" stop-color="rgba(99,102,241,0.10)"/>
<stop offset="1" stop-color="rgba(14,165,233,0.08)"/>
</linearGradient>
<!-- Soft vignette -->
<radialGradient id="vig" cx="50%" cy="45%" r="75%">
<stop offset="0" stop-color="rgba(0,0,0,0)"/>
<stop offset="1" stop-color="rgba(0,0,0,0.18)"/>
</radialGradient>
<!-- Card shadow -->
<filter id="shadow" x="-20%" y="-20%" width="140%" height="140%">
<feDropShadow dx="0" dy="6" stdDeviation="8" flood-color="rgba(0,0,0,0.18)"/>
</filter>
</defs>
<!-- base -->
<rect x="0" y="0" width="320" height="180" rx="18" fill="rgba(17,24,39,0.06)"/>
<rect x="0" y="0" width="320" height="180" rx="18" fill="url(#bg)"/>
<rect x="0" y="0" width="320" height="180" rx="18" fill="url(#vig)"/>
<!-- inner card -->
<g filter="url(#shadow)">
<rect x="18" y="18" width="284" height="144" rx="16"
fill="rgba(255,255,255,0.72)"
stroke="rgba(0,0,0,0.08)"/>
<rect x="18" y="18" width="284" height="144" rx="16"
fill="rgba(255,255,255,0)"
stroke="rgba(99,102,241,0.18)"
stroke-width="2"
stroke-dasharray="6 6"/>
</g>
<!-- icon -->
<g transform="translate(160 70)">
<circle r="20" fill="rgba(17,24,39,0.08)" stroke="rgba(0,0,0,0.08)"/>
<!-- simple "image-off" icon -->
<path d="M-10 6 L-4 0 L2 6 L10 -2" fill="none" stroke="rgba(17,24,39,0.55)" stroke-width="2.4" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M-10 -6 H10" fill="none" stroke="rgba(17,24,39,0.35)" stroke-width="2.4" stroke-linecap="round"/>
<path d="M-12 12 L12 -12" fill="none" stroke="rgba(239,68,68,0.55)" stroke-width="2.6" stroke-linecap="round"/>
</g>
<!-- text -->
<text x="160" y="118" text-anchor="middle"
font-family="ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto"
font-size="16" font-weight="750"
fill="rgba(17,24,39,0.88)">` + title + `</text>
<text x="160" y="140" text-anchor="middle"
font-family="ui-sans-serif, system-ui, -apple-system, Segoe UI, Roboto"
font-size="11.5" font-weight="650"
fill="rgba(75,85,99,0.82)">Preview nicht verfügbar</text>
</svg>
`
	w.WriteHeader(status)
	_, _ = w.Write([]byte(svg))
}

View File

@ -1,455 +0,0 @@
// backend\preview_teaser.go
package main
import (
"bufio"
"bytes"
"context"
"fmt"
"math"
"os"
"os/exec"
"strconv"
"strings"
"time"
)
// Minimum segment duration so ffmpeg doesn't choke on too-short snippets.
const minSegmentDuration = 0.75 // seconds

// Default number of teaser segments when the caller passes <= 0.
const defaultTeaserSegments = 12

// TeaserPreviewOptions configures teaser-preview generation (number of clips,
// clip length, output scaling and encoder settings).
type TeaserPreviewOptions struct {
	Segments        int     // number of clips sampled across the video
	SegmentDuration float64 // seconds per clip; clamped up to minSegmentDuration
	Width           int     // output width in px; height is derived (-2)
	Preset          string  // libx264 preset
	CRF             int     // libx264 CRF quality
	// Audio is forced to true by the generators (audio is NOT optional).
	Audio        bool
	AudioBitrate string // AAC bitrate, e.g. "128k"
	UseVsync2    bool   // adds "-vsync 2" when set
}
// stepSizeAndOffset verteilt die Startpunkte über das Video.
// Rückgabe: stepSize, offset (beide in Sekunden).
// stepSizeAndOffset distributes the clip start points across the video.
// It returns (stepSize, offset), both in seconds: the first clip starts at
// offset, each following clip stepSize later. For a single segment the
// offset is the middle of the usable range; otherwise small margins keep the
// clips away from the very start/end.
func (o TeaserPreviewOptions) stepSizeAndOffset(dur float64) (float64, float64) {
	if dur <= 0 {
		return 0, 0
	}
	n := o.Segments
	if n < 1 {
		n = 1
	}
	segDur := o.SegmentDuration
	if segDur <= 0 {
		segDur = 1
	}
	if segDur < minSegmentDuration {
		segDur = minSegmentDuration
	}
	// Last sensible start (small safety gap before the end).
	maxStart := dur - 0.05 - segDur
	if maxStart < 0 {
		maxStart = 0
	}
	// 1 segment -> middle of the usable range.
	if n == 1 {
		return 0, maxStart * 0.5
	}
	// Small margins so clips are not always taken right at the start/end.
	margin := 0.05 * maxStart
	if margin < 0 {
		margin = 0
	}
	span := maxStart - 2*margin
	if span < 0 {
		span = maxStart
		margin = 0
	}
	step := 0.0
	if n > 1 {
		step = span / float64(n-1)
	}
	return step, margin
}
// generateTeaserClipsMP4 builds a teaser MP4 from srcPath without progress
// reporting; see generateTeaserClipsMP4WithProgress for the parameters.
func generateTeaserClipsMP4(ctx context.Context, srcPath, outPath string, clipLenSec float64, maxClips int) error {
	return generateTeaserClipsMP4WithProgress(ctx, srcPath, outPath, clipLenSec, maxClips, nil)
}
// generateTeaserClipsMP4WithProgress builds a teaser MP4 of up to maxClips
// clips of clipLenSec seconds each, writing it to outPath. onRatio (may be
// nil) receives progress in [0,1]. The remaining encoder settings use fixed
// defaults (640px wide, veryfast/CRF 21, AAC 128k).
func generateTeaserClipsMP4WithProgress(
	ctx context.Context,
	srcPath, outPath string,
	clipLenSec float64,
	maxClips int,
	onRatio func(r float64),
) error {
	// Map the legacy signature onto the options struct.
	opts := TeaserPreviewOptions{
		Segments:        maxClips,
		SegmentDuration: clipLenSec,
		// stash-like defaults
		Width:        640,
		Preset:       "veryfast",
		CRF:          21,
		Audio:        true,
		AudioBitrate: "128k",
		UseVsync2:    false,
	}
	return generateTeaserPreviewMP4WithProgress(ctx, srcPath, outPath, opts, onRatio)
}
// generateTeaserChunkMP4 encodes a single teaser chunk of dur seconds
// starting at start (seconds) from src into out, writing to a ".part.mp4"
// temp file first and renaming on success. Audio is always included.
func generateTeaserChunkMP4(ctx context.Context, src, out string, start, dur float64, opts TeaserPreviewOptions) error {
	// Audio is mandatory (not optional).
	opts.Audio = true
	tmp := strings.TrimSuffix(out, ".mp4") + ".part.mp4"
	segDur := dur
	if segDur < minSegmentDuration {
		segDur = minSegmentDuration
	}
	args := []string{
		"-y", "-hide_banner", "-loglevel", "error",
	}
	// Shared input-tolerance flags (defined elsewhere in the backend).
	args = append(args, ffmpegInputTol...)
	args = append(args,
		"-ss", fmt.Sprintf("%.3f", start),
		"-t", fmt.Sprintf("%.3f", segDur),
		"-i", src,
		"-map", "0:v:0",
		"-c:v", "libx264",
		"-pix_fmt", "yuv420p",
		"-profile:v", "high",
		"-level", "4.2",
		"-preset", opts.Preset,
		"-crf", strconv.Itoa(opts.CRF),
		"-threads", "4",
	)
	if opts.UseVsync2 {
		args = append(args, "-vsync", "2")
	}
	if opts.Audio {
		args = append(args,
			"-map", "0:a:0", // audio is mandatory
			"-c:a", "aac",
			"-b:a", opts.AudioBitrate,
			"-ac", "2",
			"-shortest",
		)
	} else {
		args = append(args, "-an")
	}
	args = append(args, "-movflags", "+faststart", tmp)
	cmd := exec.CommandContext(ctx, ffmpegPath, args...)
	var stderr bytes.Buffer
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		// Clean up the partial temp file on failure.
		_ = os.Remove(tmp)
		return fmt.Errorf("ffmpeg teaser chunk failed: %v (%s)", err, strings.TrimSpace(stderr.String()))
	}
	// Atomic-ish swap: remove any old output, then rename the temp file.
	_ = os.Remove(out)
	return os.Rename(tmp, out)
}
// computeTeaserStarts normalizes the teaser options against the video
// duration and returns the clip start times (seconds), the effective segment
// duration, and the number of segments actually used.
//
// dur <= 0 (unknown duration) yields a single start at 0. If the video is
// shorter than Segments*SegmentDuration, a single segment spanning the whole
// duration is used instead.
func computeTeaserStarts(dur float64, opts TeaserPreviewOptions) (starts []float64, segDur float64, usedSegments int) {
	// Normalize opts the same way generateTeaserPreviewMP4WithProgress does.
	if opts.SegmentDuration <= 0 {
		opts.SegmentDuration = 1
	}
	if opts.Segments <= 0 {
		opts.Segments = defaultTeaserSegments
	}
	segDur = opts.SegmentDuration
	if segDur < minSegmentDuration {
		segDur = minSegmentDuration
	}
	// Short-video fallback: if the video is shorter than
	// Segments*SegmentDuration -> one segment over the whole duration.
	if dur > 0 && dur < segDur*float64(opts.Segments) {
		opts.Segments = 1
		segDur = dur
	}
	usedSegments = opts.Segments
	// Unknown duration: single start at 0.
	if !(dur > 0) {
		return []float64{0}, segDur, 1
	}
	stepSize, offset := opts.stepSizeAndOffset(dur)
	// PERF: maxStart is loop-invariant — compute it once instead of
	// recomputing math.Max per iteration as the old code did.
	maxStart := math.Max(0, dur-0.05-segDur)
	starts = make([]float64, 0, opts.Segments)
	for i := 0; i < opts.Segments; i++ {
		t := offset + float64(i)*stepSize
		if t < 0 {
			t = 0
		}
		if t > maxStart {
			t = maxStart
		}
		// Avoid starting at exactly 0 (first frames can be problematic).
		if t < 0.05 {
			t = 0.05
		}
		starts = append(starts, t)
	}
	return starts, segDur, usedSegments
}
// generateTeaserPreviewMP4WithProgress builds a single teaser MP4 by sampling
// several clips across the source video and concatenating them with one
// ffmpeg filter_complex run. Progress parsed from ffmpeg's -progress output
// is reported via onRatio (nil allowed) as a ratio in [0,1]. The result is
// written to a ".part.mp4" temp file first and renamed on success. Audio is
// always included.
func generateTeaserPreviewMP4WithProgress(
	ctx context.Context,
	srcPath, outPath string,
	opts TeaserPreviewOptions,
	onRatio func(r float64),
) error {
	// Audio is mandatory (not optional).
	opts.Audio = true
	// Defaults
	if opts.SegmentDuration <= 0 {
		opts.SegmentDuration = 1
	}
	if opts.Segments <= 0 {
		opts.Segments = defaultTeaserSegments
	}
	if opts.Width <= 0 {
		opts.Width = 640
	}
	if opts.Preset == "" {
		opts.Preset = "veryfast"
	}
	if opts.CRF <= 0 {
		opts.CRF = 21
	}
	if opts.AudioBitrate == "" {
		opts.AudioBitrate = "128k"
	}
	segDur := opts.SegmentDuration
	if segDur < minSegmentDuration {
		segDur = minSegmentDuration
	}
	// Fetch duration once (cached).
	dur, _ := durationSecondsCached(ctx, srcPath)
	// Short-video fallback: if the video is shorter than
	// Segments*SegmentDuration -> single preview over the whole duration.
	if dur > 0 && dur < segDur*float64(opts.Segments) {
		// treat as one segment, duration = dur
		opts.Segments = 1
		segDur = dur
	}
	// Unknown/too-small duration: take one piece from 0.
	if !(dur > 0) {
		if onRatio != nil {
			onRatio(0)
		}
		// min(8, segDur) keeps the fallback clip short, as before
		err := generateTeaserChunkMP4(ctx, srcPath, outPath, 0, math.Min(8, segDur), opts)
		if onRatio != nil {
			onRatio(1)
		}
		return err
	}
	starts, segDurComputed, _ := computeTeaserStarts(dur, opts)
	// segDur is used below -> overwrite with the computed value.
	segDur = segDurComputed
	// Expected output length drives the progress ratio.
	expectedOutSec := float64(len(starts)) * segDur
	tmp := strings.TrimSuffix(outPath, ".mp4") + ".part.mp4"
	args := []string{
		"-y",
		"-nostats",
		"-progress", "pipe:1",
		"-hide_banner",
		"-loglevel", "error",
	}
	// Inputs: one -ss/-t/-i triple per segment.
	for _, t := range starts {
		args = append(args, ffmpegInputTol...)
		args = append(args,
			"-ss", fmt.Sprintf("%.3f", t),
			"-t", fmt.Sprintf("%.3f", segDur),
			"-i", srcPath,
		)
	}
	// Build the filter_complex graph.
	var fc strings.Builder
	for i := range starts {
		// stash-like: scale to Width, pix_fmt/profile applied in output args
		fmt.Fprintf(&fc,
			"[%d:v]scale=%d:-2,setsar=1,setpts=PTS-STARTPTS[v%d];",
			i, opts.Width, i,
		)
		if opts.Audio {
			// normalize audio so concat is safe across inputs
			fmt.Fprintf(&fc,
				"[%d:a]aresample=48000,aformat=channel_layouts=stereo,asetpts=PTS-STARTPTS[a%d];",
				i, i,
			)
		}
	}
	// interleaved concat inputs
	for i := range starts {
		if opts.Audio {
			fmt.Fprintf(&fc, "[v%d][a%d]", i, i)
		} else {
			fmt.Fprintf(&fc, "[v%d]", i)
		}
	}
	if opts.Audio {
		fmt.Fprintf(&fc, "concat=n=%d:v=1:a=1[v][a]", len(starts))
	} else {
		fmt.Fprintf(&fc, "concat=n=%d:v=1:a=0[v]", len(starts))
	}
	args = append(args, "-filter_complex", fc.String())
	// map outputs
	args = append(args, "-map", "[v]")
	if opts.Audio {
		args = append(args, "-map", "[a]")
	}
	// Video encode (stash-like)
	args = append(args,
		"-c:v", "libx264",
		"-pix_fmt", "yuv420p",
		"-profile:v", "high",
		"-level", "4.2",
		"-preset", opts.Preset,
		"-crf", strconv.Itoa(opts.CRF),
		"-threads", "4",
	)
	if opts.UseVsync2 {
		args = append(args, "-vsync", "2")
	}
	// Audio encode (stash-like 128k) plus -ac 2
	if opts.Audio {
		args = append(args,
			"-c:a", "aac",
			"-b:a", opts.AudioBitrate,
			"-ac", "2",
			"-shortest",
		)
	}
	args = append(args, "-movflags", "+faststart", tmp)
	cmd := exec.CommandContext(ctx, ffmpegPath, args...)
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		return err
	}
	var stderr bytes.Buffer
	cmd.Stderr = &stderr
	if err := cmd.Start(); err != nil {
		return err
	}
	// Parse ffmpeg -progress key=value lines from stdout.
	sc := bufio.NewScanner(stdout)
	sc.Buffer(make([]byte, 0, 64*1024), 1024*1024)
	var lastSent float64
	var lastAt time.Time
	// send rate-limits progress callbacks: at most every 150ms and only on
	// >= 1% change, unless force is set (final "end" event).
	send := func(outSec float64, force bool) {
		if onRatio == nil {
			return
		}
		if expectedOutSec > 0 && outSec > 0 {
			r := outSec / expectedOutSec
			if r < 0 {
				r = 0
			}
			if r > 1 {
				r = 1
			}
			if r-lastSent < 0.01 && !force {
				return
			}
			if !lastAt.IsZero() && time.Since(lastAt) < 150*time.Millisecond && !force {
				return
			}
			lastSent = r
			lastAt = time.Now()
			onRatio(r)
			return
		}
		if force {
			onRatio(1)
		}
	}
	var outSec float64
	for sc.Scan() {
		line := strings.TrimSpace(sc.Text())
		if line == "" {
			continue
		}
		parts := strings.SplitN(line, "=", 2)
		if len(parts) != 2 {
			continue
		}
		k, v := parts[0], parts[1]
		switch k {
		case "out_time_ms":
			// microseconds despite the name
			if n, perr := strconv.ParseInt(strings.TrimSpace(v), 10, 64); perr == nil && n > 0 {
				outSec = float64(n) / 1_000_000.0
				send(outSec, false)
			}
		case "out_time":
			if s := parseFFmpegOutTime(v); s > 0 {
				outSec = s
				send(outSec, false)
			}
		case "progress":
			if strings.TrimSpace(v) == "end" {
				send(outSec, true)
			}
		}
	}
	if err := cmd.Wait(); err != nil {
		_ = os.Remove(tmp)
		return fmt.Errorf("ffmpeg teaser preview failed: %v (%s)", err, strings.TrimSpace(stderr.String()))
	}
	// Swap the finished temp file into place.
	_ = os.Remove(outPath)
	return os.Rename(tmp, outPath)
}

View File

@ -1,728 +0,0 @@
// backend\preview_webp.go
package main
import (
"bytes"
"context"
"fmt"
"net/http"
"os"
"os/exec"
"path/filepath"
"strconv"
"strings"
"time"
)
// ------------------------------------------------------------
// Frame extraction helpers (WebP only)
// ------------------------------------------------------------
// extractLastFrameWebP extrahiert ein WebP aus dem letzten Frame der Datei.
// runFFmpegWebPPipe runs ffmpeg with the given arguments, expecting exactly
// one WebP image on stdout, and returns the image bytes. label is used in
// error messages. It fails if ffmpeg errors or produces no output.
// (Extracted to remove the four-fold duplication of the run/collect logic.)
func runFFmpegWebPPipe(label string, args ...string) ([]byte, error) {
	cmd := exec.Command(ffmpegPath, args...)
	var out bytes.Buffer
	var stderr bytes.Buffer
	cmd.Stdout = &out
	cmd.Stderr = &stderr
	if err := cmd.Run(); err != nil {
		return nil, fmt.Errorf("ffmpeg %s: %w (%s)", label, err, strings.TrimSpace(stderr.String()))
	}
	b := out.Bytes()
	if len(b) == 0 {
		return nil, fmt.Errorf("ffmpeg %s: empty output", label)
	}
	return b, nil
}

// extractLastFrameWebP extracts a 720px-wide WebP from (roughly) the last
// frame of the file (-sseof -0.1).
func extractLastFrameWebP(path string) ([]byte, error) {
	return runFFmpegWebPPipe("last-frame webp",
		"-hide_banner",
		"-loglevel", "error",
		"-sseof", "-0.1",
		"-i", path,
		"-frames:v", "1",
		"-vf", "scale=720:-2",
		"-quality", "75",
		"-f", "image2pipe",
		"-vcodec", "libwebp",
		"pipe:1",
	)
}

// extractFrameAtTimeWebP extracts a 720px-wide WebP at the given position
// (seconds; negative values are clamped to 0).
func extractFrameAtTimeWebP(path string, seconds float64) ([]byte, error) {
	if seconds < 0 {
		seconds = 0
	}
	return runFFmpegWebPPipe("frame-at-time webp",
		"-hide_banner",
		"-loglevel", "error",
		"-ss", fmt.Sprintf("%.3f", seconds),
		"-i", path,
		"-frames:v", "1",
		"-vf", "scale=720:-2",
		"-quality", "75",
		"-f", "image2pipe",
		"-vcodec", "libwebp",
		"pipe:1",
	)
}

// extractLastFrameWebPScaled extracts the last frame as a WebP scaled to
// width (height automatic). quality: 0..100 (ffmpeg -quality); out-of-range
// values default to 70, non-positive widths to 320.
func extractLastFrameWebPScaled(path string, width int, quality int) ([]byte, error) {
	if width <= 0 {
		width = 320
	}
	if quality <= 0 || quality > 100 {
		quality = 70
	}
	return runFFmpegWebPPipe("last-frame scaled webp",
		"-hide_banner", "-loglevel", "error",
		"-sseof", "-0.25",
		"-i", path,
		"-frames:v", "1",
		"-vf", fmt.Sprintf("scale=%d:-2", width),
		"-quality", strconv.Itoa(quality),
		"-f", "image2pipe",
		"-vcodec", "libwebp",
		"pipe:1",
	)
}

// extractFirstFrameWebPScaled extracts the first frame as a WebP scaled to
// width. Same defaulting rules as extractLastFrameWebPScaled.
func extractFirstFrameWebPScaled(path string, width int, quality int) ([]byte, error) {
	if width <= 0 {
		width = 320
	}
	if quality <= 0 || quality > 100 {
		quality = 70
	}
	return runFFmpegWebPPipe("first-frame scaled webp",
		"-hide_banner", "-loglevel", "error",
		"-ss", "0",
		"-i", path,
		"-frames:v", "1",
		"-vf", fmt.Sprintf("scale=%d:-2", width),
		"-quality", strconv.Itoa(quality),
		"-f", "image2pipe",
		"-vcodec", "libwebp",
		"pipe:1",
	)
}
// sucht das "neueste" Preview-Segment (seg_low_XXXXX.ts / seg_hq_XXXXX.ts)
func latestPreviewSegment(previewDir string) (string, error) {
entries, err := os.ReadDir(previewDir)
if err != nil {
return "", err
}
var best string
for _, e := range entries {
if e.IsDir() {
continue
}
name := e.Name()
if !strings.HasPrefix(name, "seg_low_") && !strings.HasPrefix(name, "seg_hq_") {
continue
}
if best == "" || name > best {
best = name
}
}
if best == "" {
return "", fmt.Errorf("kein Preview-Segment in %s", previewDir)
}
return filepath.Join(previewDir, best), nil
}
// extractLastFrameFromPreviewDirThumbWebP erzeugt ein kleines WebP aus dem letzten Preview-Segment.
// extractLastFrameFromPreviewDirThumbWebP builds a small (320px) WebP thumb
// from the newest preview segment, falling back to the first frame when the
// last frame cannot be extracted.
func extractLastFrameFromPreviewDirThumbWebP(previewDir string) ([]byte, error) {
	seg, err := latestPreviewSegment(previewDir)
	if err != nil {
		return nil, err
	}
	// Prefer the low-res last frame; fall back to the first frame.
	if img, lerr := extractLastFrameWebPScaled(seg, 320, 70); lerr == nil && len(img) > 0 {
		return img, nil
	}
	return extractFirstFrameWebPScaled(seg, 320, 70)
}
// extractLastFrameFromPreviewDirWebP renders a full-size WebP from the newest
// preview segment; on decode failure it falls back to a scaled first frame.
func extractLastFrameFromPreviewDirWebP(previewDir string) ([]byte, error) {
	seg, err := latestPreviewSegment(previewDir)
	if err != nil {
		return nil, err
	}
	img, frameErr := extractLastFrameWebP(seg)
	if frameErr != nil {
		// Fallback: first frame scaled to 720px width.
		return extractFirstFrameWebPScaled(seg, 720, 75)
	}
	return img, nil
}
// ------------------------------------------------------------
// Preview serving (webp only)
// ------------------------------------------------------------
// serveLivePreviewWebPFile streams a WebP file for a *running* job.
// Live previews change constantly, so caching is disabled entirely.
// Missing, empty, or directory paths answer 404.
func serveLivePreviewWebPFile(w http.ResponseWriter, r *http.Request, path string) {
	f, err := os.Open(path)
	if err != nil {
		http.NotFound(w, r)
		return
	}
	defer f.Close()
	info, statErr := f.Stat()
	if statErr != nil || info.IsDir() || info.Size() == 0 {
		http.NotFound(w, r)
		return
	}
	h := w.Header()
	h.Set("Content-Type", "image/webp")
	h.Set("Cache-Control", "no-store")
	http.ServeContent(w, r, "preview.webp", info.ModTime(), f)
}
// servePreviewWebPFile streams a finished preview WebP from disk.
// Finished previews are stable, so a short public cache lifetime is allowed.
// Missing, empty, or directory paths answer 404.
func servePreviewWebPFile(w http.ResponseWriter, r *http.Request, path string) {
	f, err := os.Open(path)
	if err != nil {
		http.NotFound(w, r)
		return
	}
	defer f.Close()
	info, statErr := f.Stat()
	if statErr != nil || info.IsDir() || info.Size() == 0 {
		http.NotFound(w, r)
		return
	}
	h := w.Header()
	h.Set("Content-Type", "image/webp")
	// Finished previews may be cached by clients.
	h.Set("Cache-Control", "public, max-age=600")
	http.ServeContent(w, r, filepath.Base(path), info.ModTime(), f)
}
// servePreviewWebPBytes writes an in-memory WebP with a short public cache
// lifetime. Empty input yields 204 No Content.
func servePreviewWebPBytes(w http.ResponseWriter, b []byte) {
	if len(b) == 0 {
		w.WriteHeader(http.StatusNoContent)
		return
	}
	h := w.Header()
	h.Set("Content-Type", "image/webp")
	h.Set("Cache-Control", "public, max-age=60")
	w.WriteHeader(http.StatusOK)
	_, _ = w.Write(b) // best-effort; client may have disconnected
}
// serveLivePreviewWebPBytes writes an in-memory WebP for a running job.
// Responses are never cached; empty input yields 204 No Content (also uncached).
func serveLivePreviewWebPBytes(w http.ResponseWriter, b []byte) {
	h := w.Header()
	h.Set("Cache-Control", "no-store")
	if len(b) == 0 {
		w.WriteHeader(http.StatusNoContent)
		return
	}
	h.Set("Content-Type", "image/webp")
	w.WriteHeader(http.StatusOK)
	_, _ = w.Write(b) // best-effort; client may have disconnected
}
// ------------------------------------------------------------
// Preview alias: preview.webp (webp only)
// ------------------------------------------------------------
// servePreviewWebPAlias serves "preview.webp" for either a live job ID or a
// finished-file asset ID. Running jobs get uncached (no-store) responses,
// finished ones cacheable responses; with no file on disk a placeholder SVG
// is returned for known jobs and 404 otherwise.
func servePreviewWebPAlias(w http.ResponseWriter, r *http.Request, id string) {
	// 1) If the job is known in RAM (id == job.ID): derive the assetID from its output.
	jobsMu.Lock()
	job := jobs[id]
	jobsMu.Unlock()
	if job != nil {
		assetID := assetIDForJob(job)
		if assetID != "" {
			if webpPath, err := generatedThumbWebPFile(assetID); err == nil {
				if st, err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 {
					// NOTE(review): job.Status is read here without holding jobsMu —
					// confirm Status writes are synchronized or accept the race.
					if job.Status == JobRunning {
						serveLivePreviewWebPFile(w, r, webpPath) // live => no-store
					} else {
						servePreviewWebPFile(w, r, webpPath) // finished => cacheable
					}
					return
				}
			}
		}
		// Optional: in-memory fallback for running jobs (WebP only).
		if job.Status == JobRunning {
			job.previewMu.Lock()
			cached := job.previewWebp
			job.previewMu.Unlock()
			if len(cached) > 0 {
				serveLivePreviewWebPBytes(w, cached)
				return
			}
		}
		// Known job but no image yet: placeholder instead of a 404.
		servePreviewStatusSVG(w, "Preview", http.StatusOK)
		return
	}
	// 2) No job in RAM: treat id as an assetID (finished files after restart).
	assetID := stripHotPrefix(strings.TrimSpace(id))
	if assetID == "" {
		http.NotFound(w, r)
		return
	}
	if webpPath, err := generatedThumbWebPFile(assetID); err == nil {
		if st, err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 {
			servePreviewWebPFile(w, r, webpPath)
			return
		}
	}
	http.NotFound(w, r)
}
func isHover(r *http.Request) bool {
v := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("hover")))
return v == "1" || v == "true" || v == "yes"
}
// touchPreview records "now" as the last preview access for the job so idle
// preview pipelines can be reaped later. A nil job is a no-op.
func touchPreview(job *RecordJob) {
	if job == nil {
		return
	}
	now := time.Now()
	jobsMu.Lock()
	job.previewLastHit = now
	jobsMu.Unlock()
}
// ensurePreviewStarted lazily spins up the HLS preview pipeline for a job the
// first time someone looks at it. Serialized per job via previewStartMu; a
// second call while the pipeline already runs only refreshes previewLastHit.
// The r parameter is currently unused but kept for handler symmetry.
func ensurePreviewStarted(r *http.Request, job *RecordJob) {
	if job == nil {
		return
	}
	// Per-job start lock so concurrent requests cannot double-start the pipeline.
	job.previewStartMu.Lock()
	defer job.previewStartMu.Unlock()
	jobsMu.Lock()
	// Already running?
	if job.previewCmd != nil && job.PreviewDir != "" {
		job.previewLastHit = time.Now()
		jobsMu.Unlock()
		return
	}
	// Snapshot the source M3U8 URL plus auth headers under the lock.
	m3u8 := strings.TrimSpace(job.PreviewM3U8)
	cookie := strings.TrimSpace(job.PreviewCookie)
	ua := strings.TrimSpace(job.PreviewUA)
	jobsMu.Unlock()
	if m3u8 == "" {
		return
	}
	// Dedicated context for the preview (IMPORTANT: not the recording ctx,
	// so stopping the preview never cancels the recording itself).
	pctx, cancel := context.WithCancel(context.Background())
	// Temp directory for preview segments.
	assetID := assetIDForJob(job)
	pdir := filepath.Join(os.TempDir(), "rec_preview", assetID)
	jobsMu.Lock()
	job.PreviewDir = pdir
	job.previewCancel = cancel
	job.previewLastHit = time.Now()
	jobsMu.Unlock()
	// Best-effort start; errors surface through job/preview state elsewhere.
	_ = startPreviewHLS(pctx, job, m3u8, pdir, cookie, ua)
}
// recordPreview is the HTTP handler for /api/preview. Depending on the query
// it serves:
//   - ?file=preview.webp -> the (live or finished) WebP thumbnail
//   - ?file=<hls file>   -> a preview HLS playlist/segment
//   - otherwise          -> a cached last-frame WebP for the job, a status
//     placeholder (private/offline), or 204 while nothing exists yet.
//
// Only GET/HEAD are allowed; `id` may alternatively arrive as `name`.
func recordPreview(w http.ResponseWriter, r *http.Request) {
	// nur GET/HEAD erlauben
	if r.Method != http.MethodGet && r.Method != http.MethodHead {
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}

	id := strings.TrimSpace(r.URL.Query().Get("id"))
	if id == "" {
		// Alias: the frontend sends "name".
		id = strings.TrimSpace(r.URL.Query().Get("name"))
	}
	if id == "" {
		http.Error(w, "id fehlt", http.StatusBadRequest)
		return
	}

	// Intercept image / HLS file requests.
	if file := strings.TrimSpace(r.URL.Query().Get("file")); file != "" {
		low := strings.ToLower(file)
		// WebP only. (Bugfix: the condition previously compared against
		// "preview.webp" twice; the dead duplicate comparison is removed.)
		if low == "preview.webp" {
			servePreviewWebPAlias(w, r, id)
			return
		}
		// HLS as before.
		servePreviewHLSFile(w, r, id, file)
		return
	}

	// Is a job with this ID known in RAM (running or just finished)?
	jobsMu.Lock()
	job, ok := jobs[id]
	jobsMu.Unlock()

	if ok {
		// 0) Running: if generated/<assetID>/preview.webp already exists,
		//    serve it directly (no ffmpeg work per HTTP request).
		//    NOTE(review): job.Status is read without jobsMu here — confirm
		//    Status writes are synchronized or accept the benign race.
		if job.Status == JobRunning {
			assetID := assetIDForJob(job)
			if assetID != "" {
				if webpPath, err := generatedThumbWebPFile(assetID); err == nil {
					if st, err := os.Stat(webpPath); err == nil && !st.IsDir() && st.Size() > 0 {
						serveLivePreviewWebPFile(w, r, webpPath)
						return
					}
				}
			}
		}

		// Fallback: in-memory cache (when preview.webp is not on disk yet).
		job.previewMu.Lock()
		cached := job.previewWebp
		cachedAt := job.previewWebpAt
		freshWindow := 8 * time.Second
		fresh := len(cached) > 0 && !cachedAt.IsZero() && time.Since(cachedAt) < freshWindow

		// If stale, refresh in the background (at most one generator at a time).
		if !fresh && !job.previewGen {
			job.previewGen = true
			go func(j *RecordJob, jobID string) {
				defer func() {
					j.previewMu.Lock()
					j.previewGen = false
					j.previewMu.Unlock()
				}()

				var img []byte
				var genErr error

				// 1) From the preview segments.
				previewDir := strings.TrimSpace(j.PreviewDir)
				if previewDir != "" {
					img, genErr = extractLastFrameFromPreviewDirWebP(previewDir)
				}

				// 2) Fallback: from the output file.
				if genErr != nil || len(img) == 0 {
					outPath := strings.TrimSpace(j.Output)
					if outPath != "" {
						outPath = filepath.Clean(outPath)
						if !filepath.IsAbs(outPath) {
							if abs, err := resolvePathRelativeToApp(outPath); err == nil {
								outPath = abs
							}
						}
						if fi, err := os.Stat(outPath); err == nil && !fi.IsDir() && fi.Size() > 0 {
							img, genErr = extractLastFrameWebP(outPath)
							if genErr != nil {
								// Fallback: scaled first frame.
								img, _ = extractFirstFrameWebPScaled(outPath, 720, 75)
							}
						}
					}
				}

				if len(img) > 0 {
					j.previewMu.Lock()
					j.previewWebp = img
					j.previewWebpAt = time.Now()
					j.previewMu.Unlock()
				}
			}(job, id)
		}

		// Serve either the fresh image or the last cached one.
		out := cached
		job.previewMu.Unlock()

		if len(out) > 0 {
			serveLivePreviewWebPBytes(w, out) // no-store for running jobs
			return
		}

		// If the preview definitively cannot work -> placeholder instead of 204.
		jobsMu.Lock()
		state := strings.TrimSpace(job.PreviewState)
		jobsMu.Unlock()
		if state == "private" {
			servePreviewStatusSVG(w, "Private", http.StatusOK)
			return
		}
		if state == "offline" {
			servePreviewStatusSVG(w, "Offline", http.StatusOK)
			return
		}

		// No image available yet -> 204 (frontend shows a placeholder and retries).
		w.Header().Set("Cache-Control", "no-store")
		w.WriteHeader(http.StatusNoContent)
		return
	}

	// No job in RAM -> treat id as a file stem of a finished download.
	servePreviewForFinishedFile(w, r, id)
}
// ------------------------------------------------------------
// Live thumbs generator (WebP)
// ------------------------------------------------------------
// updateLiveThumbWebPOnce refreshes generated/<assetID>/preview.webp for a
// running job with a small (320px) last-frame thumbnail. It silently skips
// when the job is not running, the existing thumb is younger than 10s, or
// the thumbnail semaphore cannot be acquired within 3s.
func updateLiveThumbWebPOnce(ctx context.Context, job *RecordJob) {
	// Snapshot mutable job fields under the lock.
	jobsMu.Lock()
	status := job.Status
	previewDir := job.PreviewDir
	out := job.Output
	jobsMu.Unlock()
	if status != JobRunning {
		return
	}
	// Target path: generated/<assetID>/preview.webp
	assetID := assetIDForJob(job)
	thumbPath, err := generatedThumbWebPFile(assetID)
	if err != nil {
		return
	}
	// Fresh enough? Skip the (expensive) ffmpeg invocation.
	if st, err := os.Stat(thumbPath); err == nil && st.Size() > 0 {
		if time.Since(st.ModTime()) < 10*time.Second {
			return
		}
	}
	// Bound concurrent thumbnail generation via thumbSem.
	if thumbSem != nil {
		thumbCtx, cancel := context.WithTimeout(ctx, 3*time.Second)
		defer cancel()
		if err := thumbSem.Acquire(thumbCtx); err != nil {
			return
		}
		defer thumbSem.Release()
	}
	var img []byte
	// 1) Prefer the preview segments (cheap, already low-res).
	if previewDir != "" {
		if b, err := extractLastFrameFromPreviewDirThumbWebP(previewDir); err == nil && len(b) > 0 {
			img = b
		}
	}
	// 2) Fallback: last frame of the output file.
	if len(img) == 0 && out != "" {
		if b, err := extractLastFrameWebPScaled(out, 320, 70); err == nil && len(b) > 0 {
			img = b
		}
	}
	if len(img) == 0 {
		return
	}
	// Best-effort atomic write; on failure the previous thumb survives.
	_ = atomicWriteFile(thumbPath, img)
}
// startLiveThumbWebPLoop spawns (once per job) a background goroutine that
// refreshes the live thumbnail every 10 seconds while the job is running.
// Repeated calls are no-ops thanks to the LiveThumbStarted guard; the loop
// stops on ctx cancellation or as soon as the job leaves JobRunning.
func startLiveThumbWebPLoop(ctx context.Context, job *RecordJob) {
	// Start at most once per job.
	jobsMu.Lock()
	if job.LiveThumbStarted {
		jobsMu.Unlock()
		return
	}
	job.LiveThumbStarted = true
	jobsMu.Unlock()

	go func() {
		// First attempt immediately so the UI gets a thumb fast.
		updateLiveThumbWebPOnce(ctx, job)

		// Reuse one ticker instead of allocating a timer per iteration
		// (time.After in a loop allocates a new timer each round).
		ticker := time.NewTicker(10 * time.Second)
		defer ticker.Stop()
		for {
			select {
			case <-ctx.Done():
				return
			case <-ticker.C:
				// Stop as soon as the job is no longer running.
				jobsMu.Lock()
				running := job.Status == JobRunning
				jobsMu.Unlock()
				if !running {
					return
				}
				updateLiveThumbWebPOnce(ctx, job)
			}
		}
	}()
}
// ------------------------------------------------------------
// Finished file preview (WebP only, no legacy jpg migration)
// ------------------------------------------------------------
// servePreviewForFinishedFile serves (and lazily generates) preview WebPs for
// a finished recording identified by its file stem. With ?t=<seconds> a frame
// at that timestamp is served and cached as t_<sec>.webp; otherwise the main
// preview.webp is served from cache or generated (mid frame, then last frame,
// then scaled first frame as fallbacks).
func servePreviewForFinishedFile(w http.ResponseWriter, r *http.Request, id string) {
	var err error
	id, err = sanitizeID(id)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	outPath, err := findFinishedFileByID(id)
	if err != nil {
		http.Error(w, "preview nicht verfügbar", http.StatusNotFound)
		return
	}
	if err := ensureGeneratedDirs(); err != nil {
		http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// Assets are always stored under the basename without the HOT prefix.
	assetID := stripHotPrefix(id)
	if assetID == "" {
		assetID = id
	}
	assetDir, err := ensureGeneratedDir(assetID)
	if err != nil {
		http.Error(w, "generated-dir nicht verfügbar: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// Per-timestamp frame caching for ?t=... (WebP), rounded to whole seconds.
	if tStr := strings.TrimSpace(r.URL.Query().Get("t")); tStr != "" {
		if sec, err := strconv.ParseFloat(tStr, 64); err == nil && sec >= 0 {
			secI := int64(sec + 0.5)
			if secI < 0 {
				secI = 0
			}
			framePath := filepath.Join(assetDir, fmt.Sprintf("t_%d.webp", secI))
			// Cache hit for this timestamp?
			if fi, err := os.Stat(framePath); err == nil && !fi.IsDir() && fi.Size() > 0 {
				servePreviewWebPFile(w, r, framePath)
				return
			}
			img, err := extractFrameAtTimeWebP(outPath, float64(secI))
			if err == nil && len(img) > 0 {
				// best-effort cache write; serving proceeds regardless
				_ = atomicWriteFile(framePath, img)
				servePreviewWebPBytes(w, img)
				return
			}
			// extraction failed: fall through to the generic preview below
		}
	}
	thumbPath := filepath.Join(assetDir, "preview.webp")
	// 1) Cache hit.
	if fi, err := os.Stat(thumbPath); err == nil && !fi.IsDir() && fi.Size() > 0 {
		servePreviewWebPFile(w, r, thumbPath)
		return
	}
	// 2) Generate fresh, bounded to 30s overall.
	genCtx, cancel := context.WithTimeout(r.Context(), 30*time.Second)
	defer cancel()
	// Prefer the middle of the video when the duration is known.
	var t float64 = 0
	if dur, derr := durationSecondsCached(genCtx, outPath); derr == nil && dur > 0 {
		t = dur * 0.5
	}
	img, err := extractFrameAtTimeWebP(outPath, t)
	if err != nil || len(img) == 0 {
		img, err = extractLastFrameWebP(outPath)
		if err != nil || len(img) == 0 {
			// Final fallback: scaled first frame.
			img, err = extractFirstFrameWebPScaled(outPath, 720, 75)
			if err != nil || len(img) == 0 {
				http.Error(w, "konnte preview nicht erzeugen", http.StatusInternalServerError)
				return
			}
		}
	}
	// best-effort cache write; serving proceeds regardless
	_ = atomicWriteFile(thumbPath, img)
	servePreviewWebPBytes(w, img)
}

File diff suppressed because it is too large Load Diff

View File

@ -1,105 +0,0 @@
package main
import (
"math"
"strings"
)
func setJobProgress(job *RecordJob, phase string, pct int) {
phase = strings.TrimSpace(phase)
phaseLower := strings.ToLower(phase)
// clamp pct 0..100
if pct < 0 {
pct = 0
}
if pct > 100 {
pct = 100
}
// "globale" Zielbereiche pro Phase (dein Pipeline-Modell)
// postwork wartet: 70..72
// remuxing: 72..78
// moving: 78..84
// probe: 84..86
// assets: 86..99
type rng struct{ start, end int }
rangeFor := func(ph string) rng {
switch ph {
case "postwork":
return rng{0, 5}
case "remuxing":
return rng{5, 65}
case "moving":
return rng{65, 75}
case "probe":
return rng{75, 80}
case "assets":
return rng{80, 99}
default:
return rng{0, 100}
}
}
jobsMu.Lock()
defer jobsMu.Unlock()
// Sobald Postwork läuft oder Aufnahme beendet ist -> Recorder darf NICHTS mehr überschreiben.
inPostwork := job.EndedAt != nil || (strings.TrimSpace(job.Phase) != "" && strings.ToLower(strings.TrimSpace(job.Phase)) != "recording")
if inPostwork {
// harte Blockade: alte recording-Updates dürfen weder Phase noch Progress anfassen
if phaseLower == "" || phaseLower == "recording" {
return
}
}
// Phase aktualisieren (aber nur wenn nicht leer)
if phase != "" {
job.Phase = phase
}
// ✅ Sonderfall: "wartet auf Nachbearbeitung" => Progress bleibt 0%
// Erwartung: Caller sendet phase="postwork" und pct=0 solange nur gewartet wird.
// Muss vor "niemals rückwärts" passieren, sonst käme man von Recording-Progress nicht mehr auf 0.
if phaseLower == "postwork" && pct == 0 {
job.Progress = 0
return
}
// Progress-Logik:
// - wenn wir in Postwork sind und jemand phasenlokale 0..100 liefert (z.B. remuxing 25),
// mappe das in den globalen Bereich der Phase.
// - danach: niemals rückwärts.
mapped := pct
if inPostwork {
r := rangeFor(phaseLower)
if r.end >= r.start {
// Heuristik:
// - Wenn pct bereits im globalen Bereich der Phase liegt => als global interpretieren, clampen.
// - Sonst => als lokales 0..100 interpretieren und in [start..end] mappen.
if pct >= r.start && pct <= r.end {
// schon global
mapped = pct
} else {
// lokal 0..100 -> global
width := float64(r.end - r.start)
mapped = r.start + int(math.Round((float64(pct)/100.0)*width))
}
// clamp in den Bereich
if mapped < r.start {
mapped = r.start
}
if mapped > r.end {
mapped = r.end
}
}
}
// niemals rückwärts
if mapped < job.Progress {
mapped = job.Progress
}
job.Progress = mapped
}

View File

@ -1,15 +1,90 @@
// backend\record_helpers_paths.go // backend/record_paths.go
package main package main
import ( import (
"fmt" "fmt"
"net/http" "net/http"
"net/url"
"os" "os"
"path"
"path/filepath" "path/filepath"
"strings" "strings"
) )
// ---------- Basic query helpers ----------
func q(r *http.Request, key string) string {
return strings.TrimSpace(r.URL.Query().Get(key))
}
// file query -> safe basename (no traversal) + url decode
// safeBasenameQuery reads the query parameter `key`, URL-unescapes it and
// validates it as a plain file basename (no separators, no traversal).
// Returns (value, present, error); an absent/empty parameter is not an error.
// NOTE(review): r.URL.Query() already decodes once, so the extra
// QueryUnescape handles double-encoded names — confirm this is intended.
func safeBasenameQuery(r *http.Request, key string) (string, bool, error) {
	raw := strings.TrimSpace(r.URL.Query().Get(key))
	if raw == "" {
		return "", false, nil
	}
	decoded, err := url.QueryUnescape(raw)
	if err != nil {
		return "", false, err
	}
	decoded = strings.TrimSpace(decoded)
	if !isSafeBasename(decoded) {
		return "", false, fmt.Errorf("invalid basename")
	}
	return decoded, true, nil
}
// isAllowedVideoExt reports whether the filename carries a playable video
// extension (.mp4 or .ts, case-insensitive).
func isAllowedVideoExt(name string) bool {
	switch strings.ToLower(filepath.Ext(name)) {
	case ".mp4", ".ts":
		return true
	}
	return false
}
// ---------- Safe path pieces ----------
// isSafeRelDir reports whether rel is a safe *relative* directory path:
// non-empty, not absolute, no ".." traversal and no backslashes after
// normalization. "." (current directory) is allowed.
func isSafeRelDir(rel string) bool {
	rel = strings.TrimSpace(rel)
	if rel == "" {
		return false
	}
	// Normalize to forward slashes for validation.
	rel = filepath.ToSlash(rel)
	if strings.HasPrefix(rel, "/") {
		return false
	}
	cleaned := path.Clean(rel) // path.Clean => forward slashes
	switch {
	case cleaned == ".":
		return true
	case cleaned == ".." || strings.HasPrefix(cleaned, "../"):
		return false
	case strings.Contains(cleaned, `\`):
		// Reject any remaining backslash (possible traversal trickery).
		return false
	}
	return true
}
// isSafeBasename reports whether name is a plain file basename: non-empty,
// containing no path separators, and not a directory reference. Used to
// reject path traversal in user-supplied file names.
func isSafeBasename(name string) bool {
	name = strings.TrimSpace(name)
	if name == "" {
		return false
	}
	if strings.Contains(name, "/") || strings.Contains(name, "\\") {
		return false
	}
	// Bugfix: "." and ".." previously passed (filepath.Base(".") == ".",
	// filepath.Base("..") == ".."), but both resolve to directories when
	// joined with a root and must never be accepted as file basenames.
	if name == "." || name == ".." {
		return false
	}
	return filepath.Base(name) == name
}
// setNoStoreHeaders disables browser/proxy caching for the response
// (important for volatile endpoints such as logs and status).
func setNoStoreHeaders(w http.ResponseWriter) {
	h := w.Header()
	h.Set("Cache-Control", "no-store, max-age=0")
	h.Set("Pragma", "no-cache")
	h.Set("Expires", "0")
}
// ---------- Resolve dirs ----------
func resolvePathRelativeToApp(p string) (string, error) { func resolvePathRelativeToApp(p string) (string, error) {
p = strings.TrimSpace(p) p = strings.TrimSpace(p)
if p == "" { if p == "" {
@ -84,14 +159,15 @@ func getDoneDir() string {
return strings.TrimSpace(s.DoneDir) return strings.TrimSpace(s.DoneDir)
} }
// ---------- Finders ----------
func findVideoPath(file string) (string, error) { func findVideoPath(file string) (string, error) {
base := filepath.Base(file) // verhindert path traversal base := filepath.Base(file) // verhindert path traversal
// TODO: passe diese Root-Dirs an deine echten Pfade an:
roots := []string{ roots := []string{
getRecordingsDir(), // z.B. downloads/output root getRecordingsDir(),
getDoneDir(), // ✅ NEU: fertige Dateien liegen typischerweise hier getDoneDir(),
getKeepDir(), // keep root getKeepDir(),
} }
// 1) direkt in den Roots // 1) direkt in den Roots
@ -123,13 +199,6 @@ func findVideoPath(file string) (string, error) {
return "", os.ErrNotExist return "", os.ErrNotExist
} }
func setNoStoreHeaders(w http.ResponseWriter) {
// verhindert Browser/Proxy Caching (wichtig für Logs/Status)
w.Header().Set("Cache-Control", "no-store, max-age=0")
w.Header().Set("Pragma", "no-cache")
w.Header().Set("Expires", "0")
}
func findFileInDirOrOneLevelSubdirs(root string, file string, skipDirName string) (string, os.FileInfo, bool) { func findFileInDirOrOneLevelSubdirs(root string, file string, skipDirName string) (string, os.FileInfo, bool) {
// direct // direct
p := filepath.Join(root, file) p := filepath.Join(root, file)
@ -183,6 +252,8 @@ func durationFromMetaIfFresh(videoPath, assetDir string, fi os.FileInfo) (float6
return readVideoMetaDuration(metaPath, fi) return readVideoMetaDuration(metaPath, fi)
} }
// durationSecondsCacheOnly returns a cached duration if available and still fresh.
// It relies on your existing durCache implementation elsewhere.
func durationSecondsCacheOnly(path string, fi os.FileInfo) float64 { func durationSecondsCacheOnly(path string, fi os.FileInfo) float64 {
durCache.mu.Lock() durCache.mu.Lock()
e, ok := durCache.m[path] e, ok := durCache.m[path]
@ -193,3 +264,83 @@ func durationSecondsCacheOnly(path string, fi os.FileInfo) float64 {
} }
return 0 return 0
} }
// ---------- Playback resolver (shared by video + scrubber/meta) ----------

// resolvePlayablePathFromQuery resolves a playable file from the request:
// either ?file=<basename> (searched in done/, done/keep/ and record dirs,
// with a .ts -> .mp4 fallback) or ?id=<jobID> (the job's output file).
// Returns (absolute cleaned path, ok, httpStatus, errMsg); on success the
// status/message are zero values.
func resolvePlayablePathFromQuery(r *http.Request) (string, bool, int, string) {
	// returns: (path, ok, httpStatus, errMsg)
	// 1) file mode: validated basename from the query
	if file, ok, err := safeBasenameQuery(r, "file"); err != nil {
		return "", false, http.StatusBadRequest, "ungültiger file"
	} else if ok {
		// Only playable container formats may be requested.
		if !isAllowedVideoExt(file) {
			return "", false, http.StatusForbidden, "nicht erlaubt"
		}
		s := getSettings()
		recordAbs, err := resolvePathRelativeToApp(s.RecordDir)
		if err != nil {
			return "", false, http.StatusInternalServerError, "recordDir auflösung fehlgeschlagen: " + err.Error()
		}
		doneAbs, err := resolvePathRelativeToApp(s.DoneDir)
		if err != nil {
			return "", false, http.StatusInternalServerError, "doneDir auflösung fehlgeschlagen: " + err.Error()
		}
		// Candidates: the requested name, plus a .mp4 fallback for .ts
		// (recordings get remuxed to mp4 after finishing).
		ext := strings.ToLower(filepath.Ext(file))
		names := []string{file}
		if ext == ".ts" {
			names = append(names, strings.TrimSuffix(file, ext)+".mp4")
		}
		// Search order: done/ (excluding keep), done/keep/, then record dir.
		for _, name := range names {
			if p, _, ok := findFileInDirOrOneLevelSubdirs(doneAbs, name, "keep"); ok {
				return filepath.Clean(strings.TrimSpace(p)), true, 0, ""
			}
			if p, _, ok := findFileInDirOrOneLevelSubdirs(filepath.Join(doneAbs, "keep"), name, ""); ok {
				return filepath.Clean(strings.TrimSpace(p)), true, 0, ""
			}
			if p, _, ok := findFileInDirOrOneLevelSubdirs(recordAbs, name, ""); ok {
				return filepath.Clean(strings.TrimSpace(p)), true, 0, ""
			}
		}
		return "", false, http.StatusNotFound, "datei nicht gefunden"
	}
	// 2) id mode: look up the in-memory job and use its output file
	id := strings.TrimSpace(r.URL.Query().Get("id"))
	if id == "" {
		return "", false, http.StatusBadRequest, "id fehlt"
	}
	jobsMu.Lock()
	job, ok := jobs[id]
	jobsMu.Unlock()
	if !ok {
		return "", false, http.StatusNotFound, "job nicht gefunden"
	}
	outPath := filepath.Clean(strings.TrimSpace(job.Output))
	if outPath == "" {
		return "", false, http.StatusNotFound, "output fehlt"
	}
	if !filepath.IsAbs(outPath) {
		abs, err := resolvePathRelativeToApp(outPath)
		if err != nil {
			return "", false, http.StatusInternalServerError, "pfad auflösung fehlgeschlagen: " + err.Error()
		}
		outPath = abs
	}
	// The output must exist and be a non-empty regular file.
	fi, err := os.Stat(outPath)
	if err != nil || fi == nil || fi.IsDir() || fi.Size() == 0 {
		return "", false, http.StatusNotFound, "datei nicht gefunden"
	}
	return outPath, true, 0, ""
}

View File

@ -1,122 +0,0 @@
package main
import (
"fmt"
"math"
"net/http"
"net/url"
"strconv"
"strings"
)
const defaultScrubberCount = 18
// /api/preview-scrubber/{index}?id=... (oder ?file=...)
func recordPreviewScrubberFrame(w http.ResponseWriter, r *http.Request) {
const prefix = "/api/preview-scrubber/"
if !strings.HasPrefix(r.URL.Path, prefix) {
http.NotFound(w, r)
return
}
idxPart := strings.Trim(strings.TrimPrefix(r.URL.Path, prefix), "/")
if idxPart == "" {
http.Error(w, "missing scrubber frame index", http.StatusBadRequest)
return
}
idx, err := strconv.Atoi(idxPart)
if err != nil || idx < 0 {
http.Error(w, "invalid scrubber frame index", http.StatusBadRequest)
return
}
// id oder file muss vorhanden sein (wie bei recordPreview / recordDoneMeta)
q := r.URL.Query()
id := strings.TrimSpace(q.Get("id"))
file := strings.TrimSpace(q.Get("file"))
if id == "" && file == "" {
http.Error(w, "missing id or file", http.StatusBadRequest)
return
}
// Dauer aus Meta ermitteln (WICHTIG für gleichmäßige Verteilung)
durSec, err := lookupDurationForScrubber(r, id, file)
if err != nil || durSec <= 0 {
// Fallback: wir versuchen trotzdem was Sinnvolles
// (z. B. 60s annehmen) besser als gar kein Bild
durSec = 60
}
// Count: gleich wie im Frontend (oder dynamisch, aber dann auch im Payload liefern!)
count := defaultScrubberCount
if idx >= count {
// wenn Frontend mehr sendet als Backend erwartet -> clamp
idx = count - 1
}
if count < 1 {
count = 1
}
t := scrubberIndexToTime(idx, count, durSec)
// An bestehenden Preview-Handler delegieren via Redirect
// recordPreview unterstützt bei dir bereits ?id=...&t=...
targetQ := url.Values{}
if id != "" {
targetQ.Set("id", id)
}
if file != "" {
targetQ.Set("file", file)
}
targetQ.Set("t", fmt.Sprintf("%.3f", t))
// Cache freundlich (optional feinjustieren)
w.Header().Set("Cache-Control", "private, max-age=300")
http.Redirect(w, r, "/api/preview?"+targetQ.Encode(), http.StatusFound)
}
// Gleichmäßig über die Videolänge sampeln (Mitte des Segments)
func scrubberIndexToTime(index, count int, durationSec float64) float64 {
if count <= 1 {
return 0.1
}
if durationSec <= 0 {
return 0.1
}
// nicht exakt bei 0 / nicht exakt am Ende
maxT := math.Max(0.1, durationSec-0.1)
ratio := (float64(index) + 0.5) / float64(count)
t := ratio * maxT
if t < 0.1 {
t = 0.1
}
if t > maxT {
t = maxT
}
return t
}
// TODO: Hier deine bestehende Meta-Lookup-Logik aus recordDoneMeta wiederverwenden.
// Ziel: durationSeconds aus meta.json / job-meta lesen.
// Diese Funktion ist der einzige Teil, den du an dein Projekt anpassen musst.
func lookupDurationForScrubber(r *http.Request, id, file string) (float64, error) {
// ------------------------------------------------------------
// OPTION A (empfohlen): dieselbe interne Funktion nutzen wie recordDoneMeta
// Beispiel (PSEUDO):
//
// meta, err := loadDoneMetaByIDOrFile(id, file)
// if err != nil { return 0, err }
// if d := meta.DurationSeconds; d > 0 { return d, nil }
//
// ------------------------------------------------------------
// ------------------------------------------------------------
// OPTION B: Wenn du aktuell keine Helper-Funktion hast:
// erstmal Fehler zurückgeben und später konkret anschließen.
// ------------------------------------------------------------
return 0, fmt.Errorf("lookupDurationForScrubber not wired yet")
}

View File

@ -1,67 +0,0 @@
// backend\record_preview_sprite.go
package main
import (
"net/http"
"os"
"path/filepath"
"strings"
)
func recordPreviewSprite(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet && r.Method != http.MethodHead {
http.Error(w, "Nur GET/HEAD", http.StatusMethodNotAllowed)
return
}
// Unterstützt beide Prefixe (falls du mal testweise /api/preview-sprite/ nutzt)
id := strings.TrimPrefix(r.URL.Path, "/api/record/preview-sprite/")
if id == r.URL.Path {
id = strings.TrimPrefix(r.URL.Path, "/api/preview-sprite/")
}
id = strings.TrimSpace(id)
// Falls jemand versehentlich einen Slash am Ende schickt
id = strings.Trim(id, "/")
if id == "" {
http.Error(w, "id fehlt", http.StatusBadRequest)
return
}
var err error
id, err = sanitizeID(id)
if err != nil {
http.Error(w, "ungültige id", http.StatusBadRequest)
return
}
dir, err := generatedDirForID(id)
if err != nil {
http.Error(w, "ungültige id", http.StatusBadRequest)
return
}
spritePath := filepath.Join(dir, "preview-sprite.webp")
fi, err := os.Stat(spritePath)
if err != nil || fi.IsDir() || fi.Size() <= 0 {
http.NotFound(w, r)
return
}
f, err := os.Open(spritePath)
if err != nil {
http.NotFound(w, r)
return
}
defer f.Close()
// Cachebar (du hängst im Frontend ?v=updatedAtUnix dran)
w.Header().Set("Content-Type", "image/webp")
w.Header().Set("Cache-Control", "private, max-age=31536000, immutable")
w.Header().Set("X-Content-Type-Options", "nosniff")
http.ServeContent(w, r, "preview-sprite.webp", fi.ModTime(), f)
}

View File

@ -1,5 +1,4 @@
// backend\record_start.go // backend/recorder.go
package main package main
import ( import (
@ -7,27 +6,259 @@ import (
"errors" "errors"
"fmt" "fmt"
"math" "math"
"net/http"
"net/url"
"os" "os"
"path/filepath" "path/filepath"
"strconv"
"strings" "strings"
"time" "time"
"github.com/google/uuid" "github.com/google/uuid"
) )
// ---------------- Progress mapping ----------------
// setJobProgress updates a job's phase and progress percentage, mapping
// phase-local 0..100 values into per-phase global ranges once post-processing
// has started (postwork 0-5, remuxing 5-65, moving 65-75, probe 75-80,
// assets 80-99). Progress never moves backwards, except for the explicit
// "waiting for postwork" reset (phase=="postwork", pct==0).
func setJobProgress(job *RecordJob, phase string, pct int) {
	phase = strings.TrimSpace(phase)
	phaseLower := strings.ToLower(phase)
	// Clamp pct to 0..100.
	if pct < 0 {
		pct = 0
	}
	if pct > 100 {
		pct = 100
	}
	// Global target range per pipeline phase.
	type rng struct{ start, end int }
	rangeFor := func(ph string) rng {
		switch ph {
		case "postwork":
			return rng{0, 5}
		case "remuxing":
			return rng{5, 65}
		case "moving":
			return rng{65, 75}
		case "probe":
			return rng{75, 80}
		case "assets":
			return rng{80, 99}
		default:
			return rng{0, 100}
		}
	}
	jobsMu.Lock()
	defer jobsMu.Unlock()
	// Once postwork runs or recording ended, the recorder must not overwrite
	// phase/progress anymore: block late "recording" updates entirely.
	inPostwork := job.EndedAt != nil || (strings.TrimSpace(job.Phase) != "" && strings.ToLower(strings.TrimSpace(job.Phase)) != "recording")
	if inPostwork {
		if phaseLower == "" || phaseLower == "recording" {
			return
		}
	}
	// Update the phase (only when non-empty).
	if phase != "" {
		job.Phase = phase
	}
	// Special case: "waiting for postwork" pins progress at 0%. This must
	// happen before the never-backwards rule, otherwise recording progress
	// could never be reset to 0.
	if phaseLower == "postwork" && pct == 0 {
		job.Progress = 0
		return
	}
	// In postwork, a phase-local 0..100 value is mapped into the phase's
	// global range; a value already inside that range is taken as global.
	mapped := pct
	if inPostwork {
		r := rangeFor(phaseLower)
		if r.end >= r.start {
			if pct >= r.start && pct <= r.end {
				// Already a global value.
				mapped = pct
			} else {
				// Local 0..100 -> global range.
				width := float64(r.end - r.start)
				mapped = r.start + int(math.Round((float64(pct)/100.0)*width))
			}
			// Clamp into the phase range.
			if mapped < r.start {
				mapped = r.start
			}
			if mapped > r.end {
				mapped = r.end
			}
		}
	}
	// Never move backwards.
	if mapped < job.Progress {
		mapped = job.Progress
	}
	job.Progress = mapped
}
// ---------------- Preview scrubber ----------------

// defaultScrubberCount is the number of evenly spaced scrubber frames; it
// must match the frontend's expectation.
const defaultScrubberCount = 18

// recordPreviewScrubberFrame handles /api/preview-scrubber/{index}?id=...
// (or ?file=...). It maps the frame index to a timestamp spread evenly over
// the video duration and redirects to /api/preview?...&t=<sec>.
func recordPreviewScrubberFrame(w http.ResponseWriter, r *http.Request) {
	const prefix = "/api/preview-scrubber/"
	if !strings.HasPrefix(r.URL.Path, prefix) {
		http.NotFound(w, r)
		return
	}
	idxPart := strings.Trim(strings.TrimPrefix(r.URL.Path, prefix), "/")
	if idxPart == "" {
		http.Error(w, "missing scrubber frame index", http.StatusBadRequest)
		return
	}
	idx, err := strconv.Atoi(idxPart)
	if err != nil || idx < 0 {
		http.Error(w, "invalid scrubber frame index", http.StatusBadRequest)
		return
	}
	// id or file must be present (mirrors recordPreview / recordDoneMeta).
	q := r.URL.Query()
	id := strings.TrimSpace(q.Get("id"))
	file := strings.TrimSpace(q.Get("file"))
	if id == "" && file == "" {
		http.Error(w, "missing id or file", http.StatusBadRequest)
		return
	}
	// Determine duration (needed for even distribution); fall back to 60s
	// so a best-effort frame is still produced.
	durSec, err := lookupDurationForScrubber(r)
	if err != nil || durSec <= 0 {
		durSec = 60
	}
	// Clamp the index to the fixed frame count.
	count := defaultScrubberCount
	if idx >= count {
		idx = count - 1
	}
	if count < 1 {
		count = 1
	}
	t := scrubberIndexToTime(idx, count, durSec)
	// Delegate to the existing preview handler via redirect (?t=...).
	targetQ := url.Values{}
	if id != "" {
		targetQ.Set("id", id)
	}
	if file != "" {
		targetQ.Set("file", file)
	}
	targetQ.Set("t", fmt.Sprintf("%.3f", t))
	w.Header().Set("Cache-Control", "private, max-age=300")
	http.Redirect(w, r, "/api/preview?"+targetQ.Encode(), http.StatusFound)
}
// Gleichmäßig über die Videolänge sampeln (Mitte des Segments)
func scrubberIndexToTime(index, count int, durationSec float64) float64 {
if count <= 1 {
return 0.1
}
if durationSec <= 0 {
return 0.1
}
maxT := math.Max(0.1, durationSec-0.1)
ratio := (float64(index) + 0.5) / float64(count)
t := ratio * maxT
if t < 0.1 {
t = 0.1
}
if t > maxT {
t = maxT
}
return t
}
// lookupDurationForScrubber resolves the playable file referenced by the
// request (?file= or ?id=) and returns its duration in seconds, using the
// cached probe with a 5-second budget.
func lookupDurationForScrubber(r *http.Request) (float64, error) {
	path, ok, _, _ := resolvePlayablePathFromQuery(r)
	if !ok || strings.TrimSpace(path) == "" {
		return 0, fmt.Errorf("unable to resolve file")
	}
	// Best-effort: make sure meta.json exists before probing.
	ensureMetaJSONForPlayback(r.Context(), path)

	probeCtx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()
	return durationSecondsCached(probeCtx, path)
}
// ---------------- Preview sprite file handler ----------------
// recordPreviewSprite serves generated/<id>/preview-sprite.webp for scrubber
// previews. Both /api/record/preview-sprite/ and /api/preview-sprite/ URL
// prefixes are accepted. Responses are immutable-cacheable because the
// frontend appends a ?v=<updatedAt> cache-buster.
func recordPreviewSprite(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodGet && r.Method != http.MethodHead {
		http.Error(w, "Nur GET/HEAD", http.StatusMethodNotAllowed)
		return
	}
	// Extract the id from whichever prefix matched.
	id := strings.TrimPrefix(r.URL.Path, "/api/record/preview-sprite/")
	if id == r.URL.Path {
		id = strings.TrimPrefix(r.URL.Path, "/api/preview-sprite/")
	}
	// Tolerate stray whitespace and trailing slashes.
	id = strings.Trim(strings.TrimSpace(id), "/")
	if id == "" {
		http.Error(w, "id fehlt", http.StatusBadRequest)
		return
	}
	sanitized, err := sanitizeID(id)
	if err != nil {
		http.Error(w, "ungültige id", http.StatusBadRequest)
		return
	}
	dir, err := generatedDirForID(sanitized)
	if err != nil {
		http.Error(w, "ungültige id", http.StatusBadRequest)
		return
	}
	spritePath := filepath.Join(dir, "preview-sprite.webp")
	info, err := os.Stat(spritePath)
	if err != nil || info.IsDir() || info.Size() <= 0 {
		http.NotFound(w, r)
		return
	}
	f, err := os.Open(spritePath)
	if err != nil {
		http.NotFound(w, r)
		return
	}
	defer f.Close()
	h := w.Header()
	h.Set("Content-Type", "image/webp")
	h.Set("Cache-Control", "private, max-age=31536000, immutable")
	h.Set("X-Content-Type-Options", "nosniff")
	http.ServeContent(w, r, "preview-sprite.webp", info.ModTime(), f)
}
// ---------------- Start + run job ----------------
func startRecordingInternal(req RecordRequest) (*RecordJob, error) { func startRecordingInternal(req RecordRequest) (*RecordJob, error) {
url := strings.TrimSpace(req.URL) url := strings.TrimSpace(req.URL)
if url == "" { if url == "" {
return nil, errors.New("url fehlt") return nil, errors.New("url fehlt")
} }
// Duplicate-running guard (identische URL)
jobsMu.Lock() jobsMu.Lock()
for _, j := range jobs { for _, j := range jobs {
// ✅ Nur blocken, solange wirklich noch aufgenommen wird.
// Sobald EndedAt gesetzt ist (Postwork/Queue läuft), darf ein neuer Download starten.
if j != nil && j.Status == JobRunning && j.EndedAt == nil && strings.TrimSpace(j.SourceURL) == url { if j != nil && j.Status == JobRunning && j.EndedAt == nil && strings.TrimSpace(j.SourceURL) == url {
// ✅ Wenn ein versteckter Auto-Check-Job läuft und der User manuell startet -> sofort sichtbar machen
if j.Hidden && !req.Hidden { if j.Hidden && !req.Hidden {
j.Hidden = false j.Hidden = false
jobsMu.Unlock() jobsMu.Unlock()
@ -41,11 +272,9 @@ func startRecordingInternal(req RecordRequest) (*RecordJob, error) {
} }
} }
// ✅ Timestamp + Output schon hier setzen, damit UI sofort Model/Filename/Details hat
startedAt := time.Now() startedAt := time.Now()
provider := detectProvider(url) provider := detectProvider(url)
// best-effort Username aus URL
username := "" username := ""
switch provider { switch provider {
case "chaturbate": case "chaturbate":
@ -57,10 +286,8 @@ func startRecordingInternal(req RecordRequest) (*RecordJob, error) {
username = "unknown" username = "unknown"
} }
// Dateiname (konsistent zu runJob: gleicher Timestamp)
filename := fmt.Sprintf("%s_%s.ts", username, startedAt.Format("01_02_2006__15-04-05")) filename := fmt.Sprintf("%s_%s.ts", username, startedAt.Format("01_02_2006__15-04-05"))
// best-effort: absoluter RecordDir (fallback auf Settings-Wert)
s := getSettings() s := getSettings()
recordDirAbs, _ := resolvePathRelativeToApp(s.RecordDir) recordDirAbs, _ := resolvePathRelativeToApp(s.RecordDir)
recordDir := strings.TrimSpace(recordDirAbs) recordDir := strings.TrimSpace(recordDirAbs)
@ -77,7 +304,7 @@ func startRecordingInternal(req RecordRequest) (*RecordJob, error) {
SourceURL: url, SourceURL: url,
Status: JobRunning, Status: JobRunning,
StartedAt: startedAt, StartedAt: startedAt,
StartedAtMs: startedAt.UnixMilli(), // ✅ NEU StartedAtMs: startedAt.UnixMilli(),
Output: outPath, Output: outPath,
Hidden: req.Hidden, Hidden: req.Hidden,
cancel: cancel, cancel: cancel,
@ -86,7 +313,6 @@ func startRecordingInternal(req RecordRequest) (*RecordJob, error) {
jobs[jobID] = job jobs[jobID] = job
jobsMu.Unlock() jobsMu.Unlock()
// ✅ NEU: Hidden-Jobs nicht sofort ins UI broadcasten
if !job.Hidden { if !job.Hidden {
notifyJobsChanged() notifyJobsChanged()
} }
@ -101,13 +327,11 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
var err error var err error
// ✅ nutze den Timestamp vom Job (damit Start/Output konsistent sind)
now := job.StartedAt now := job.StartedAt
if now.IsZero() { if now.IsZero() {
now = time.Now() now = time.Now()
} }
// ✅ falls StartedAtMs aus irgendeinem Grund leer ist
if job.StartedAtMs == 0 { if job.StartedAtMs == 0 {
base := job.StartedAt base := job.StartedAt
if base.IsZero() { if base.IsZero() {
@ -121,11 +345,9 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
jobsMu.Unlock() jobsMu.Unlock()
} }
// ✅ Phase für Recording explizit setzen (damit spätere Progress-Writer das erkennen können)
setJobProgress(job, "recording", 0) setJobProgress(job, "recording", 0)
notifyJobsChanged() notifyJobsChanged()
// ---- Aufnahme starten (Output-Pfad sauber relativ zur EXE auflösen) ----
switch provider { switch provider {
case "chaturbate": case "chaturbate":
if !hasChaturbateCookies(req.Cookie) { if !hasChaturbateCookies(req.Cookie) {
@ -144,7 +366,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
username := extractUsername(req.URL) username := extractUsername(req.URL)
filename := fmt.Sprintf("%s_%s.ts", username, now.Format("01_02_2006__15-04-05")) filename := fmt.Sprintf("%s_%s.ts", username, now.Format("01_02_2006__15-04-05"))
// ✅ wenn Output schon beim Start gesetzt wurde, nutze ihn (falls absolut)
jobsMu.Lock() jobsMu.Lock()
existingOut := strings.TrimSpace(job.Output) existingOut := strings.TrimSpace(job.Output)
jobsMu.Unlock() jobsMu.Unlock()
@ -154,7 +375,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
outPath = filepath.Join(recordDirAbs, filename) outPath = filepath.Join(recordDirAbs, filename)
} }
// Output nur aktualisieren, wenn es sich ändert
if strings.TrimSpace(existingOut) != strings.TrimSpace(outPath) { if strings.TrimSpace(existingOut) != strings.TrimSpace(outPath) {
jobsMu.Lock() jobsMu.Lock()
job.Output = outPath job.Output = outPath
@ -192,10 +412,8 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
fmt.Println("❌ [record]", provider, job.SourceURL, "->", err) fmt.Println("❌ [record]", provider, job.SourceURL, "->", err)
} }
// ---- Recording fertig: EndedAt/Error setzen ----
end := time.Now() end := time.Now()
// Zielstatus bestimmen (finaler Status wird erst NACH Postwork gesetzt!)
target := JobFinished target := JobFinished
var errText string var errText string
if err != nil { if err != nil {
@ -207,25 +425,19 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
} }
} }
// direkt nach provider record endet (egal ob err != nil oder nil)
stopPreview(job) stopPreview(job)
// EndedAt + Error speichern (kurz locken)
jobsMu.Lock() jobsMu.Lock()
job.EndedAt = &end job.EndedAt = &end
job.EndedAtMs = end.UnixMilli() // ✅ NEU job.EndedAtMs = end.UnixMilli()
if errText != "" { if errText != "" {
job.Error = errText job.Error = errText
} }
// ✅ WICHTIG: sofort Phase wechseln, damit Recorder-Progress danach nichts mehr “zurücksetzt”
job.Phase = "postwork" job.Phase = "postwork"
out := strings.TrimSpace(job.Output) out := strings.TrimSpace(job.Output)
jobsMu.Unlock() jobsMu.Unlock()
notifyJobsChanged() notifyJobsChanged()
// Falls Output fehlt (z.B. provider error), direkt final status setzen
if out == "" { if out == "" {
jobsMu.Lock() jobsMu.Lock()
job.Status = target job.Status = target
@ -239,17 +451,13 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
return return
} }
// ✅ NEU: Bevor Postwork queued wird -> kleine Downloads direkt löschen // pre-queue auto delete (small)
// (spart Remux/Move/ffprobe/assets komplett)
{ {
s := getSettings() s := getSettings()
minMB := s.AutoDeleteSmallDownloadsBelowMB minMB := s.AutoDeleteSmallDownloadsBelowMB
if s.AutoDeleteSmallDownloads && minMB > 0 { if s.AutoDeleteSmallDownloads && minMB > 0 {
threshold := int64(minMB) * 1024 * 1024 threshold := int64(minMB) * 1024 * 1024
// out ist i.d.R. absolut; Stat ist cheap
if fi, serr := os.Stat(out); serr == nil && fi != nil && !fi.IsDir() { if fi, serr := os.Stat(out); serr == nil && fi != nil && !fi.IsDir() {
// Size auch ins Job-JSON schreiben (nice fürs UI, selbst wenn wir danach löschen)
jobsMu.Lock() jobsMu.Lock()
job.SizeBytes = fi.Size() job.SizeBytes = fi.Size()
jobsMu.Unlock() jobsMu.Unlock()
@ -263,7 +471,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
removeGeneratedForID(id) removeGeneratedForID(id)
purgeDurationCacheForPath(out) purgeDurationCacheForPath(out)
// Job komplett entfernen (wie dein späterer Auto-Delete-Block)
jobsMu.Lock() jobsMu.Lock()
delete(jobs, job.ID) delete(jobs, job.ID)
jobsMu.Unlock() jobsMu.Unlock()
@ -274,32 +481,23 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
if shouldLogRecordInfo(req) { if shouldLogRecordInfo(req) {
fmt.Println("🧹 auto-deleted (pre-queue):", base, "(size: "+formatBytesSI(fi.Size())+")") fmt.Println("🧹 auto-deleted (pre-queue):", base, "(size: "+formatBytesSI(fi.Size())+")")
} }
return return
} else { } else {
fmt.Println("⚠️ auto-delete (pre-queue) failed:", derr) fmt.Println("⚠️ auto-delete (pre-queue) failed:", derr)
// wenn delete fehlschlägt -> normal weiter in Postwork
} }
} }
} }
} }
} }
// ✅ Postwork: remux/move/ffprobe/assets begrenzen -> in Queue // postwork queue
postOut := out postOut := out
postTarget := target postTarget := target
postKey := "postwork:" + job.ID postKey := "postwork:" + job.ID
// ✅ WICHTIG:
// - Status noch NICHT auf JobStopped/JobFinished setzen, sonst verschwindet er aus der Downloads-Tabelle.
// - Stattdessen Phase "postwork" + Progress hochsetzen (monoton).
// - Zusätzlich: PostWorkKey setzen + initialen Queue-Status ins Job-JSON hängen.
jobsMu.Lock() jobsMu.Lock()
job.Phase = "postwork" job.Phase = "postwork"
job.PostWorkKey = postKey job.PostWorkKey = postKey
// initialer Status (meist "missing", bis Enqueue done ist wir updaten direkt danach nochmal)
{ {
s := postWorkQ.StatusForKey(postKey) s := postWorkQ.StatusForKey(postKey)
job.PostWork = &s job.PostWork = &s
@ -311,19 +509,14 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
Key: postKey, Key: postKey,
Added: time.Now(), Added: time.Now(),
Run: func(ctx context.Context) error { Run: func(ctx context.Context) error {
// beim Start: Queue-Status refresh (sollte jetzt "running" werden)
{ {
st := postWorkQ.StatusForKey(postKey) st := postWorkQ.StatusForKey(postKey)
jobsMu.Lock() jobsMu.Lock()
job.PostWork = &st job.PostWork = &st
jobsMu.Unlock() jobsMu.Unlock()
// optisches "queued" bumping
setJobProgress(job, "postwork", 0) setJobProgress(job, "postwork", 0)
notifyJobsChanged() notifyJobsChanged()
} }
out := strings.TrimSpace(postOut) out := strings.TrimSpace(postOut)
@ -340,21 +533,16 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
return nil return nil
} }
// Helper: Progress nur nach oben (gegen "rückwärts")
setPhase := func(phase string, pct int) { setPhase := func(phase string, pct int) {
// Phase+Progress inkl. Mapping/Monotonie
setJobProgress(job, phase, pct) setJobProgress(job, phase, pct)
// Queue-Status aktuell halten
st := postWorkQ.StatusForKey(postKey) st := postWorkQ.StatusForKey(postKey)
jobsMu.Lock() jobsMu.Lock()
job.PostWork = &st job.PostWork = &st
jobsMu.Unlock() jobsMu.Unlock()
notifyJobsChanged() notifyJobsChanged()
} }
// 1) Remux (nur wenn TS) // 1) Remux
if strings.EqualFold(filepath.Ext(out), ".ts") { if strings.EqualFold(filepath.Ext(out), ".ts") {
setPhase("remuxing", 72) setPhase("remuxing", 72)
if newOut, err2 := maybeRemuxTSForJob(job, out); err2 == nil && strings.TrimSpace(newOut) != "" { if newOut, err2 := maybeRemuxTSForJob(job, out); err2 == nil && strings.TrimSpace(newOut) != "" {
@ -366,7 +554,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
} }
} }
// 2) Move to done (best-effort) // 2) Move to done
setPhase("moving", 78) setPhase("moving", 78)
if moved, err2 := moveToDoneDir(out); err2 == nil && strings.TrimSpace(moved) != "" { if moved, err2 := moveToDoneDir(out); err2 == nil && strings.TrimSpace(moved) != "" {
out = strings.TrimSpace(moved) out = strings.TrimSpace(moved)
@ -377,7 +565,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
notifyDoneChanged() notifyDoneChanged()
} }
// 3) Dauer (ffprobe) // 3) Duration
setPhase("probe", 84) setPhase("probe", 84)
{ {
dctx, cancel := context.WithTimeout(ctx, 6*time.Second) dctx, cancel := context.WithTimeout(ctx, 6*time.Second)
@ -390,13 +578,12 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
cancel() cancel()
} }
// 5) Video-Props // 4) Video props
setPhase("probe", 86) setPhase("probe", 86)
{ {
pctx, cancel := context.WithTimeout(ctx, 6*time.Second) pctx, cancel := context.WithTimeout(ctx, 6*time.Second)
w, h, fps, perr := probeVideoProps(pctx, out) w, h, fps, perr := probeVideoProps(pctx, out)
cancel() cancel()
if perr == nil { if perr == nil {
jobsMu.Lock() jobsMu.Lock()
job.VideoWidth = w job.VideoWidth = w
@ -407,7 +594,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
} }
} }
// 6) Assets (preview.webp + preview.mp4) // 5) Assets with progress
const ( const (
assetsStart = 86 assetsStart = 86
assetsEnd = 99 assetsEnd = 99
@ -425,7 +612,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
if r > 1 { if r > 1 {
r = 1 r = 1
} }
pct := assetsStart + int(math.Round(r*float64(assetsEnd-assetsStart))) pct := assetsStart + int(math.Round(r*float64(assetsEnd-assetsStart)))
if pct < assetsStart { if pct < assetsStart {
pct = assetsStart pct = assetsStart
@ -433,7 +619,6 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
if pct > assetsEnd { if pct > assetsEnd {
pct = assetsEnd pct = assetsEnd
} }
if pct == lastPct { if pct == lastPct {
return return
} }
@ -445,12 +630,12 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
setPhase("assets", pct) setPhase("assets", pct)
} }
if err := ensureAssetsForVideoWithProgress(out, job.SourceURL, update); err != nil { if _, err := ensureAssetsForVideoWithProgressCtx(ctx, out, job.SourceURL, update); err != nil {
fmt.Println("⚠️ ensureAssetsForVideo:", err) fmt.Println("⚠️ ensureAssetsForVideo:", err)
} }
setPhase("assets", assetsEnd) setPhase("assets", assetsEnd)
// 7) Finalize: JETZT finalen Status setzen (damit er erst dann aus Downloads verschwindet) // Finalize
jobsMu.Lock() jobsMu.Lock()
job.Status = postTarget job.Status = postTarget
job.Phase = "" job.Phase = ""
@ -460,20 +645,17 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
jobsMu.Unlock() jobsMu.Unlock()
notifyJobsChanged() notifyJobsChanged()
notifyDoneChanged() notifyDoneChanged()
return nil return nil
}, },
}) })
if okQueued { if okQueued {
// ✅ direkt nach erfolgreichem Enqueue nochmal Status holen (nun "queued" + Position möglich)
st := postWorkQ.StatusForKey(postKey) st := postWorkQ.StatusForKey(postKey)
jobsMu.Lock() jobsMu.Lock()
job.PostWork = &st job.PostWork = &st
jobsMu.Unlock() jobsMu.Unlock()
notifyJobsChanged() notifyJobsChanged()
} else { } else {
// Queue voll -> Fallback: finalisieren
jobsMu.Lock() jobsMu.Lock()
job.Status = postTarget job.Status = postTarget
job.Phase = "" job.Phase = ""
@ -484,6 +666,4 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
notifyJobsChanged() notifyJobsChanged()
notifyDoneChanged() notifyDoneChanged()
} }
return
} }

View File

@ -47,6 +47,7 @@ func registerRoutes(mux *http.ServeMux, auth *AuthManager) *ModelStore {
api.HandleFunc("/api/record/status", recordStatus) api.HandleFunc("/api/record/status", recordStatus)
api.HandleFunc("/api/record/stop", recordStop) api.HandleFunc("/api/record/stop", recordStop)
api.HandleFunc("/api/preview", recordPreview) api.HandleFunc("/api/preview", recordPreview)
api.HandleFunc("/api/preview/live", recordPreviewLive)
api.HandleFunc("/api/preview-scrubber/", recordPreviewScrubberFrame) api.HandleFunc("/api/preview-scrubber/", recordPreviewScrubberFrame)
api.HandleFunc("/api/preview-sprite/", recordPreviewSprite) api.HandleFunc("/api/preview-sprite/", recordPreviewSprite)
api.HandleFunc("/api/record/list", recordList) api.HandleFunc("/api/record/list", recordList)
@ -136,11 +137,15 @@ func buildPostgresDSNFromSettings() (string, error) {
return "", fmt.Errorf("databaseUrl ungültig: %w", err) return "", fmt.Errorf("databaseUrl ungültig: %w", err)
} }
// 1) Wenn URL bereits Passwort enthält -> direkt verwenden // 1) Wenn URL bereits Passwort enthält -> nur verwenden, wenn es NICHT der Placeholder ist
if u.User != nil { if u.User != nil {
if _, hasPw := u.User.Password(); hasPw { if pw, hasPw := u.User.Password(); hasPw {
pw = strings.TrimSpace(pw)
if pw != "" && pw != "****" {
return u.String(), nil return u.String(), nil
} }
// sonst: Placeholder -> ignorieren und unten aus EncryptedDBPassword einsetzen
}
} }
// 2) Passwort fehlt -> aus EncryptedDBPassword holen // 2) Passwort fehlt -> aus EncryptedDBPassword holen

View File

@ -140,6 +140,14 @@ func loadSettings() {
} }
} }
} }
// ✅ WICHTIG: Migrationsergebnis zurück in den globalen settings-State schreiben
settingsMu.Lock()
settings = s
settingsMu.Unlock()
// optional aber sinnvoll: Migration auch persistieren
saveSettingsToDisk()
} }
// Ordner sicherstellen // Ordner sicherstellen
@ -315,6 +323,10 @@ func recordSettingsHandler(w http.ResponseWriter, r *http.Request) {
// 2) Migration: wenn in.DatabaseURL ein Passwort enthält, extrahieren // 2) Migration: wenn in.DatabaseURL ein Passwort enthält, extrahieren
// und URL ohne Passwort zurückschreiben. // und URL ohne Passwort zurückschreiben.
sanitizedURL, pwFromURL := stripPasswordFromPostgresURL(in.DatabaseURL) sanitizedURL, pwFromURL := stripPasswordFromPostgresURL(in.DatabaseURL)
pwFromURL = strings.TrimSpace(pwFromURL)
if pwFromURL == "****" {
pwFromURL = ""
}
if sanitizedURL != "" { if sanitizedURL != "" {
in.DatabaseURL = sanitizedURL in.DatabaseURL = sanitizedURL
} }

View File

@ -1,13 +1,17 @@
// backend\cleanup.go // backend\tasks_cleanup.go
package main package main
import ( import (
"encoding/json" "encoding/json"
"fmt"
"io/fs"
"net/http" "net/http"
"os" "os"
"path/filepath" "path/filepath"
"strings" "strings"
"sync/atomic"
"time"
) )
type cleanupResp struct { type cleanupResp struct {
@ -19,10 +23,6 @@ type cleanupResp struct {
DeletedBytesHuman string `json:"deletedBytesHuman"` DeletedBytesHuman string `json:"deletedBytesHuman"`
ErrorCount int `json:"errorCount"` ErrorCount int `json:"errorCount"`
// Orphans cleanup (previews/thumbs/generated ohne passende Video-Datei)
OrphanIDsScanned int `json:"orphanIdsScanned"`
OrphanIDsRemoved int `json:"orphanIdsRemoved"`
// ✅ NEU: Generated-GC separat (nicht in orphanIds reinmischen) // ✅ NEU: Generated-GC separat (nicht in orphanIds reinmischen)
GeneratedOrphansChecked int `json:"generatedOrphansChecked"` GeneratedOrphansChecked int `json:"generatedOrphansChecked"`
GeneratedOrphansRemoved int `json:"generatedOrphansRemoved"` GeneratedOrphansRemoved int `json:"generatedOrphansRemoved"`
@ -76,9 +76,6 @@ func settingsCleanupHandler(w http.ResponseWriter, r *http.Request) {
cleanupSmallFiles(doneAbs, threshold, &resp) cleanupSmallFiles(doneAbs, threshold, &resp)
} }
// 2) Orphans entfernen (immer sinnvoll, unabhängig von mb)
cleanupOrphanAssets(doneAbs, &resp)
// ✅ Beim manuellen Aufräumen: Generated-GC synchron laufen lassen, // ✅ Beim manuellen Aufräumen: Generated-GC synchron laufen lassen,
// damit die Zahlen in der JSON-Response landen. // damit die Zahlen in der JSON-Response landen.
gcStats := triggerGeneratedGarbageCollectorSync() gcStats := triggerGeneratedGarbageCollectorSync()
@ -151,8 +148,6 @@ func cleanupSmallFiles(doneAbs string, threshold int64, resp *cleanupResp) {
// generated + legacy cleanup (best effort) // generated + legacy cleanup (best effort)
if strings.TrimSpace(id) != "" { if strings.TrimSpace(id) != "" {
removeGeneratedForID(id) removeGeneratedForID(id)
_ = os.RemoveAll(filepath.Join(doneAbs, "preview", id))
_ = os.RemoveAll(filepath.Join(doneAbs, "thumbs", id))
} }
purgeDurationCacheForPath(p) purgeDurationCacheForPath(p)
@ -189,8 +184,6 @@ func cleanupSmallFiles(doneAbs string, threshold int64, resp *cleanupResp) {
if strings.TrimSpace(id) != "" { if strings.TrimSpace(id) != "" {
removeGeneratedForID(id) removeGeneratedForID(id)
_ = os.RemoveAll(filepath.Join(doneAbs, "preview", id))
_ = os.RemoveAll(filepath.Join(doneAbs, "thumbs", id))
} }
purgeDurationCacheForPath(full) purgeDurationCacheForPath(full)
@ -204,107 +197,123 @@ func cleanupSmallFiles(doneAbs string, threshold int64, resp *cleanupResp) {
scanDir(doneAbs, true) scanDir(doneAbs, true)
} }
// Orphans = Preview/Thumbs/Generated IDs, für die keine Video-Datei im doneAbs existiert. var generatedGCRunning int32
func cleanupOrphanAssets(doneAbs string, resp *cleanupResp) {
// 1) Existierende Video-IDs einsammeln
existingIDs := collectExistingVideoIDs(doneAbs)
// 2) Orphan-IDs aus preview/thumbs ermitteln type generatedGCStats struct {
previewDir := filepath.Join(doneAbs, "preview") Checked int
thumbsDir := filepath.Join(doneAbs, "thumbs") Removed int
ids := make(map[string]struct{})
addDirChildrenAsIDs := func(dir string) {
ents, err := os.ReadDir(dir)
if err != nil {
return
} }
for _, e := range ents {
// Läuft synchron und liefert Zahlen zurück (für /api/settings/cleanup Response).
func triggerGeneratedGarbageCollectorSync() generatedGCStats {
// nur 1 GC gleichzeitig
if !atomic.CompareAndSwapInt32(&generatedGCRunning, 0, 1) {
fmt.Println("🧹 [gc] skip: already running")
return generatedGCStats{}
}
defer atomic.StoreInt32(&generatedGCRunning, 0)
stats := runGeneratedGarbageCollector()
return stats
}
// Läuft 1× nach Serverstart (mit Delay), löscht /generated/* Orphans.
func startGeneratedGarbageCollector() {
go func() {
time.Sleep(3 * time.Second)
triggerGeneratedGarbageCollectorSync()
}()
}
// Core-Logik ohne Delay (für manuelle Trigger, z.B. nach Cleanup)
// Liefert Stats zurück, damit /api/settings/cleanup die Zahlen anzeigen kann.
func runGeneratedGarbageCollector() generatedGCStats {
stats := generatedGCStats{}
s := getSettings()
doneAbs, err := resolvePathRelativeToApp(s.DoneDir)
if err != nil {
fmt.Println("🧹 [gc] resolve doneDir failed:", err)
return stats
}
doneAbs = strings.TrimSpace(doneAbs)
if doneAbs == "" {
return stats
}
// 1) Live-IDs sammeln: alle mp4/ts unter /done (rekursiv), .trash ignorieren
live := make(map[string]struct{}, 4096)
_ = filepath.WalkDir(doneAbs, func(p string, d fs.DirEntry, err error) error {
if err != nil {
return nil
}
name := d.Name()
if d.IsDir() {
if strings.EqualFold(name, ".trash") {
return fs.SkipDir
}
return nil
}
ext := strings.ToLower(filepath.Ext(name))
if ext != ".mp4" && ext != ".ts" {
return nil
}
info, err := d.Info()
if err != nil || info.IsDir() || info.Size() <= 0 {
return nil
}
base := strings.TrimSuffix(name, ext)
id, err := sanitizeID(stripHotPrefix(base))
if err != nil || id == "" {
return nil
}
live[id] = struct{}{}
return nil
})
// 2) /generated/meta/<id> prüfen
metaRoot, err := generatedMetaRoot()
if err == nil {
metaRoot = strings.TrimSpace(metaRoot)
}
if err != nil || metaRoot == "" {
return stats
}
removedMeta := 0
checkedMeta := 0
if entries, err := os.ReadDir(metaRoot); err == nil {
for _, e := range entries {
if !e.IsDir() { if !e.IsDir() {
continue continue
} }
id := strings.TrimSpace(e.Name()) id := strings.TrimSpace(e.Name())
if id == "" { if id == "" || strings.HasPrefix(id, ".") {
continue continue
} }
ids[id] = struct{}{}
} checkedMeta++
} if _, ok := live[id]; ok {
addDirChildrenAsIDs(previewDir)
addDirChildrenAsIDs(thumbsDir)
resp.OrphanIDsScanned = len(ids)
// 3) Alles löschen, was nicht mehr existiert
for id := range ids {
if _, ok := existingIDs[id]; ok {
continue continue
} }
// remove generated artifacts (best effort)
removeGeneratedForID(id) removeGeneratedForID(id)
removedMeta++
// remove legacy preview/thumbs
_ = os.RemoveAll(filepath.Join(previewDir, id))
_ = os.RemoveAll(filepath.Join(thumbsDir, id))
resp.OrphanIDsRemoved++
} }
} }
func collectExistingVideoIDs(doneAbs string) map[string]struct{} { fmt.Printf("🧹 [gc] generated/meta checked=%d removed_orphans=%d\n", checkedMeta, removedMeta)
out := make(map[string]struct{}) stats.Checked += checkedMeta
stats.Removed += removedMeta
isCandidate := func(name string) bool { return stats
low := strings.ToLower(name)
if strings.Contains(low, ".part") || strings.Contains(low, ".tmp") {
return false
}
ext := strings.ToLower(filepath.Ext(name))
return ext == ".mp4" || ext == ".ts"
}
addFile := func(p string) {
name := filepath.Base(p)
if !isCandidate(name) {
return
}
base := strings.TrimSuffix(name, filepath.Ext(name))
id := stripHotPrefix(base)
id = strings.TrimSpace(id)
if id != "" {
out[id] = struct{}{}
}
}
// root + 1-level subdirs (skip keep)
ents, err := os.ReadDir(doneAbs)
if err != nil {
return out
}
for _, e := range ents {
full := filepath.Join(doneAbs, e.Name())
if e.IsDir() {
if e.Name() == "keep" {
continue
}
sub, err := os.ReadDir(full)
if err != nil {
continue
}
for _, se := range sub {
if se.IsDir() {
continue
}
addFile(filepath.Join(full, se.Name()))
}
continue
}
addFile(full)
}
return out
} }

View File

@ -16,7 +16,7 @@ import ModelDetails from './components/ui/ModelDetails'
import { SignalIcon, HeartIcon, HandThumbUpIcon, EyeIcon } from '@heroicons/react/24/solid' import { SignalIcon, HeartIcon, HandThumbUpIcon, EyeIcon } from '@heroicons/react/24/solid'
import PerformanceMonitor from './components/ui/PerformanceMonitor' import PerformanceMonitor from './components/ui/PerformanceMonitor'
import { useNotify } from './components/ui/notify' import { useNotify } from './components/ui/notify'
import { startChaturbateOnlinePolling } from './lib/chaturbateOnlinePoller' //import { startChaturbateOnlinePolling } from './lib/chaturbateOnlinePoller'
import CategoriesTab from './components/ui/CategoriesTab' import CategoriesTab from './components/ui/CategoriesTab'
import LoginPage from './components/ui/LoginPage' import LoginPage from './components/ui/LoginPage'
@ -258,6 +258,16 @@ export default function App() {
const [authChecked, setAuthChecked] = useState(false) const [authChecked, setAuthChecked] = useState(false)
const [authed, setAuthed] = useState(false) const [authed, setAuthed] = useState(false)
const sourceUrlInputRef = useRef<HTMLInputElement | null>(null)
const selectSourceUrl = useCallback(() => {
const el = sourceUrlInputRef.current
if (!el) return
// Fokus sicherstellen, dann alles markieren
el.focus()
// rAF, damit der Fokus sicher "sitzt" (und für Mobile/Safari stabiler)
requestAnimationFrame(() => el.select())
}, [])
const checkAuth = useCallback(async () => { const checkAuth = useCallback(async () => {
try { try {
@ -2401,288 +2411,6 @@ export default function App() {
} }
}, [autoAddEnabled, autoStartEnabled, enqueueStart]) }, [autoAddEnabled, autoStartEnabled, enqueueStart])
useEffect(() => {
const stop = startChaturbateOnlinePolling({
getModels: () => {
if (!recSettingsRef.current.useChaturbateApi) return []
const modelsMap = modelsByKeyRef.current
const pendingMap = pendingAutoStartByKeyRef.current
const watchedKeysLower = Object.values(modelsMap)
.filter((m) => Boolean(m?.watching) && String(m?.host ?? '').toLowerCase().includes('chaturbate'))
.map((m) => String(m?.modelKey ?? '').trim().toLowerCase())
.filter(Boolean)
const queuedKeysLower = Object.keys(pendingMap || {})
.map((k) => String(k || '').trim().toLowerCase())
.filter(Boolean)
// ✅ NUR watched + queued pollen (Store kann riesig sein -> lag)
// Wenn du Store-Online später willst: extra, seltener Poll (z.B. 60s) separat lösen.
return Array.from(new Set([...watchedKeysLower, ...queuedKeysLower]))
},
getShow: () => ['public', 'private', 'hidden', 'away'],
intervalMs: 8000,
onData: (data: ChaturbateOnlineResponse) => {
void (async () => {
if (!data?.enabled) {
setCbOnlineByKeyLower({})
cbOnlineByKeyLowerRef.current = {}
lastCbShowByKeyLowerRef.current = {}
setPendingWatchedRooms([])
everCbOnlineByKeyLowerRef.current = {}
cbOnlineInitDoneRef.current = false
lastCbOnlineByKeyLowerRef.current = {}
setLastHeaderUpdateAtMs(Date.now())
return
}
const nextSnap: Record<string, ChaturbateOnlineRoom> = {}
for (const r of Array.isArray(data.rooms) ? data.rooms : []) {
const u = String(r?.username ?? '').trim().toLowerCase()
if (u) nextSnap[u] = r
}
setCbOnlineByKeyLower(nextSnap)
cbOnlineByKeyLowerRef.current = nextSnap
// ✅ Toasts: (A) watched offline->online, (B) waiting->public, (C) online->offline->online => "wieder online"
try {
const notificationsOn = Boolean((recSettingsRef.current as any).enableNotifications ?? true)
const waiting = new Set(['private', 'away', 'hidden'])
// watched-Keys (nur Chaturbate)
const watchedSetLower = new Set(
Object.values(modelsByKeyRef.current || {})
.filter((m) => Boolean(m?.watching) && String(m?.host ?? '').toLowerCase().includes('chaturbate'))
.map((m) => String(m?.modelKey ?? '').trim().toLowerCase())
.filter(Boolean)
)
const prevShow = lastCbShowByKeyLowerRef.current || {}
const nextShowMap: Record<string, string> = { ...prevShow }
const prevOnline = lastCbOnlineByKeyLowerRef.current || {}
const isInitial = !cbOnlineInitDoneRef.current
// ✅ "war schon mal online" Snapshot (vor diesem Poll)
const everOnline = everCbOnlineByKeyLowerRef.current || {}
const nextEverOnline: Record<string, true> = { ...everOnline }
for (const [keyLower, room] of Object.entries(nextSnap)) {
const nowShow = String((room as any)?.current_show ?? '').toLowerCase().trim()
const beforeShow = String(prevShow[keyLower] ?? '').toLowerCase().trim()
const wasOnline = Boolean(prevOnline[keyLower])
const isOnline = true // weil es in nextSnap ist
const becameOnline = isOnline && !wasOnline
// ✅ war irgendwann schon mal online (vor diesem Poll)?
const hadEverBeenOnline = Boolean(everOnline[keyLower])
const modelName = String((room as any)?.username ?? keyLower).trim() || keyLower
const imageUrl = String((room as any)?.image_url ?? '').trim()
// immer merken: jetzt ist es online
nextEverOnline[keyLower] = true
// (B) waiting -> public => "wieder online" (höchste Priorität, damit kein Doppel-Toast)
const becamePublicFromWaiting = nowShow === 'public' && waiting.has(beforeShow)
if (becamePublicFromWaiting) {
if (notificationsOn) {
notify.info(modelName, 'ist wieder online.', {
imageUrl,
imageAlt: `${modelName} Vorschau`,
durationMs: 5500,
onClick: () => {
window.dispatchEvent(
new CustomEvent('open-model-details', {
detail: { modelKey: modelName },
})
)
},
})
}
if (nowShow) nextShowMap[keyLower] = nowShow
continue
}
// (A/C) watched: offline -> online
if (watchedSetLower.has(keyLower) && becameOnline) {
// C: online->offline->online => "wieder online"
const cameBackFromOffline = hadEverBeenOnline
// Startup-Spam vermeiden
if (notificationsOn && !isInitial) {
notify.info(
modelName,
cameBackFromOffline ? 'ist wieder online.' : 'ist online.',
{
imageUrl,
imageAlt: `${modelName} Vorschau`,
durationMs: 5500,
onClick: () => {
window.dispatchEvent(
new CustomEvent('open-model-details', {
detail: { modelKey: modelName },
})
)
},
}
)
}
}
if (nowShow) nextShowMap[keyLower] = nowShow
}
// Presence-Snapshot merken
const nextOnline: Record<string, true> = {}
for (const k of Object.keys(nextSnap)) nextOnline[k] = true
lastCbOnlineByKeyLowerRef.current = nextOnline
// ✅ "ever online" merken
everCbOnlineByKeyLowerRef.current = nextEverOnline
cbOnlineInitDoneRef.current = true
lastCbShowByKeyLowerRef.current = nextShowMap
} catch {
// ignore
}
// Online-Keys für Store
const storeKeys = chaturbateStoreKeysLowerRef.current
const nextOnlineStore: Record<string, true> = {}
for (const k of storeKeys || []) {
const kl = String(k || '').trim().toLowerCase()
if (kl && nextSnap[kl]) nextOnlineStore[kl] = true
}
// Pending Watched Rooms (nur im running Tab)
if (!recSettingsRef.current.useChaturbateApi) {
setPendingWatchedRooms([])
} else if (selectedTabRef.current !== 'running') {
// optional: nicht leeren
} else {
const modelsMap = modelsByKeyRef.current
const pendingMap = pendingAutoStartByKeyRef.current
const watchedKeysLower = Array.from(
new Set(
Object.values(modelsMap)
.filter((m) => Boolean(m?.watching) && String(m?.host ?? '').toLowerCase().includes('chaturbate'))
.map((m) => String(m?.modelKey ?? '').trim().toLowerCase())
.filter(Boolean)
)
)
const queuedKeysLower = Object.keys(pendingMap || {})
.map((k) => String(k || '').trim().toLowerCase())
.filter(Boolean)
const queuedSetLower = new Set(queuedKeysLower)
const keysToCheckLower = Array.from(new Set([...watchedKeysLower, ...queuedKeysLower]))
if (keysToCheckLower.length === 0) {
setPendingWatchedRooms([])
} else {
const nextPending: PendingWatchedRoom[] = []
for (const keyLower of keysToCheckLower) {
const room = nextSnap[keyLower]
if (!room) continue
const username = String(room?.username ?? '').trim()
const currentShow = String(room?.current_show ?? 'unknown')
if (currentShow === 'public' && !queuedSetLower.has(keyLower)) continue
const canonicalUrl = `https://chaturbate.com/${(username || keyLower).trim()}/`
nextPending.push({
id: keyLower,
modelKey: username || keyLower,
url: canonicalUrl,
currentShow,
imageUrl: String((room as any)?.image_url ?? ''),
})
}
nextPending.sort((a, b) => a.modelKey.localeCompare(b.modelKey, undefined, { sensitivity: 'base' }))
setPendingWatchedRooms(nextPending)
}
}
// queued auto-start
if (!recSettingsRef.current.useChaturbateApi) return
if (busyRef.current) return
const pendingMap = pendingAutoStartByKeyRef.current
const keys = Object.keys(pendingMap || {})
.map((k) => String(k || '').toLowerCase())
.filter(Boolean)
for (const kLower of keys) {
const room = nextSnap[kLower]
if (!room) continue
if (String(room.current_show ?? '') !== 'public') continue
const url = pendingMap[kLower]
if (!url) continue
// ✅ nicht mehr seriell awaiten, sondern in die Start-Queue
enqueueStart({ url, silent: true, pendingKeyLower: kLower })
}
setLastHeaderUpdateAtMs(Date.now())
})()
},
})
return () => stop()
}, [])
useEffect(() => {
// ✅ nur sinnvoll, wenn Chaturbate API aktiv ist
if (!recSettings.useChaturbateApi) {
setOnlineModelsCount(0)
return
}
const stop = startChaturbateOnlinePolling({
// ✅ leer => ALL-mode (durch fetchAllWhenNoModels)
getModels: () => [],
getShow: () => ['public', 'private', 'hidden', 'away'],
// deutlich seltener, weil potentiell groß
intervalMs: 30000,
fetchAllWhenNoModels: true,
onData: (data) => {
if (!data?.enabled) {
setOnlineModelsCount(0)
return
}
const total = Number((data as any)?.total ?? 0)
setOnlineModelsCount(Number.isFinite(total) ? total : 0)
setLastHeaderUpdateAtMs(Date.now())
},
onError: (e) => {
console.error('[ALL-online poller] error', e)
},
})
return () => stop()
}, [recSettings.useChaturbateApi])
if (!authChecked) { if (!authChecked) {
return <div className="min-h-[100dvh] grid place-items-center">Lade</div> return <div className="min-h-[100dvh] grid place-items-center">Lade</div>
} }
@ -2792,8 +2520,22 @@ export default function App() {
<div className="relative"> <div className="relative">
<label className="sr-only">Source URL</label> <label className="sr-only">Source URL</label>
<input <input
ref={sourceUrlInputRef}
value={sourceUrl} value={sourceUrl}
onChange={(e) => setSourceUrl(e.target.value)} onChange={(e) => setSourceUrl(e.target.value)}
onMouseDown={(e) => {
// nur Linksklick
if (e.button !== 0) return
// wenn schon fokussiert: Browser soll Caret nicht irgendwohin setzen
// und wir markieren gleich alles
e.preventDefault()
selectSourceUrl()
}}
onFocus={() => {
// z.B. Tab-Navigation ins Feld
selectSourceUrl()
}}
placeholder="https://…" placeholder="https://…"
className="block w-full rounded-lg px-3 py-2.5 text-sm bg-white text-gray-900 shadow-sm ring-1 ring-gray-200 focus:outline-none focus:ring-2 focus:ring-indigo-500 dark:bg-white/10 dark:text-white dark:ring-white/10" className="block w-full rounded-lg px-3 py-2.5 text-sm bg-white text-gray-900 shadow-sm ring-1 ring-gray-200 focus:outline-none focus:ring-2 focus:ring-indigo-500 dark:bg-white/10 dark:text-white dark:ring-white/10"
/> />
@ -2919,11 +2661,18 @@ export default function App() {
runningJobs={runningJobs} runningJobs={runningJobs}
cookies={cookies} cookies={cookies}
blurPreviews={recSettings.blurPreviews} blurPreviews={recSettings.blurPreviews}
// ✅ neu: gleiche Teaser-Settings wie FinishedDownloads
teaserPlayback={recSettings.teaserPlayback ?? 'hover'}
teaserAudio={Boolean(recSettings.teaserAudio)}
onToggleHot={handleToggleHot} onToggleHot={handleToggleHot}
onDelete={handleDeleteJob} onDelete={handleDeleteJob}
onKeep={handleKeepJob} // ✅ neu
onToggleFavorite={handleToggleFavorite} onToggleFavorite={handleToggleFavorite}
onToggleLike={handleToggleLike} onToggleLike={handleToggleLike}
onToggleWatch={handleToggleWatch} onToggleWatch={handleToggleWatch}
onStopJob={stopJob}
/> />
{playerJob ? ( {playerJob ? (

View File

@ -358,6 +358,8 @@ export default function FinishedDownloadsCardsView({
const [scrubActiveByKey, setScrubActiveByKey] = React.useState<Record<string, number | undefined>>({}) const [scrubActiveByKey, setScrubActiveByKey] = React.useState<Record<string, number | undefined>>({})
const [scrubHoveringByKey, setScrubHoveringByKey] = React.useState<Record<string, boolean | undefined>>({}) const [scrubHoveringByKey, setScrubHoveringByKey] = React.useState<Record<string, boolean | undefined>>({})
const [hoveredThumbKey, setHoveredThumbKey] = React.useState<string | null>(null)
const setScrubActiveIndex = React.useCallback((key: string, index: number | undefined) => { const setScrubActiveIndex = React.useCallback((key: string, index: number | undefined) => {
setScrubActiveByKey((prev) => { setScrubActiveByKey((prev) => {
if (index === undefined) { if (index === undefined) {
@ -619,11 +621,18 @@ export default function FinishedDownloadsCardsView({
} }
className="group/thumb relative aspect-video rounded-t-lg bg-black/5 dark:bg-white/5" className="group/thumb relative aspect-video rounded-t-lg bg-black/5 dark:bg-white/5"
onMouseEnter={ onMouseEnter={
isSmall || opts?.disablePreviewHover ? undefined : () => onHoverPreviewKeyChange?.(k) isSmall || opts?.disablePreviewHover
? undefined
: () => {
setHoveredThumbKey(k)
onHoverPreviewKeyChange?.(k)
}
} }
onMouseLeave={() => { onMouseLeave={() => {
if (!isSmall && !opts?.disablePreviewHover) onHoverPreviewKeyChange?.(null) if (!isSmall && !opts?.disablePreviewHover) onHoverPreviewKeyChange?.(null)
setHoveredThumbKey((prev) => (prev === k ? null : prev))
clearScrubActiveIndex(k) clearScrubActiveIndex(k)
setScrubHovering(k, false)
}} }}
onClick={(e) => { onClick={(e) => {
e.preventDefault() e.preventDefault()
@ -674,8 +683,6 @@ export default function FinishedDownloadsCardsView({
animated={allowTeaserAnimation} animated={allowTeaserAnimation}
animatedMode="teaser" animatedMode="teaser"
animatedTrigger="always" animatedTrigger="always"
clipSeconds={1}
thumbSamples={18}
inlineVideo={!opts?.disableInline && inlineActive ? 'always' : false} inlineVideo={!opts?.disableInline && inlineActive ? 'always' : false}
inlineNonce={inlineNonce} inlineNonce={inlineNonce}
inlineControls={inlineActive} inlineControls={inlineActive}
@ -718,7 +725,11 @@ export default function FinishedDownloadsCardsView({
) : null} ) : null}
{/* ✅ stashapp-artiger Hover-Scrubber (wie GalleryView) */} {/* ✅ stashapp-artiger Hover-Scrubber (wie GalleryView) */}
{!opts?.isDecorative && !opts?.disableScrubber && !inlineActive && scrubberCount > 1 ? ( {!opts?.isDecorative &&
!opts?.disableScrubber &&
!inlineActive &&
scrubberCount > 1 &&
hoveredThumbKey === k ? (
<div <div
className="absolute inset-x-0 bottom-0 z-30 pointer-events-none opacity-100 transition-opacity duration-150" className="absolute inset-x-0 bottom-0 z-30 pointer-events-none opacity-100 transition-opacity duration-150"
onClick={(e) => e.stopPropagation()} onClick={(e) => e.stopPropagation()}
@ -726,7 +737,6 @@ export default function FinishedDownloadsCardsView({
onMouseEnter={() => setScrubHovering(k, true)} onMouseEnter={() => setScrubHovering(k, true)}
onMouseLeave={() => { onMouseLeave={() => {
setScrubHovering(k, false) setScrubHovering(k, false)
// optional: Index sofort loslassen, dann springt Bar direkt zurück auf Teaser
setScrubActiveIndex(k, undefined) setScrubActiveIndex(k, undefined)
}} }}
> >
@ -736,46 +746,29 @@ export default function FinishedDownloadsCardsView({
activeIndex={scrubActiveIndex} activeIndex={scrubActiveIndex}
onActiveIndexChange={(idx) => setScrubActiveIndex(k, idx)} onActiveIndexChange={(idx) => setScrubActiveIndex(k, idx)}
onIndexClick={(index) => { onIndexClick={(index) => {
// wie Preview-Klick: inline starten
if (isSmall || opts?.disableInline) { if (isSmall || opts?.disableInline) {
// Mobile/Decorative/Fallback: bestehendes Verhalten
handleScrubberClickIndex(j, index, scrubberCount) handleScrubberClickIndex(j, index, scrubberCount)
return return
} }
// Zielsekunde aus Scrubber ableiten const seconds = scrubberStepSeconds > 0 ? index * scrubberStepSeconds : 0
const seconds =
scrubberStepSeconds > 0
? index * scrubberStepSeconds
: 0
// 1) bevorzugt: direkt inline an Position starten (falls Parent das unterstützt)
if (startInlineAt) { if (startInlineAt) {
startInlineAt(k, seconds, inlineDomId) startInlineAt(k, seconds, inlineDomId)
// wie bei Tap im Mobile-Stack: Autoplay nochmal anschubsen
requestAnimationFrame(() => { requestAnimationFrame(() => {
if (!tryAutoplayInline(inlineDomId)) { if (!tryAutoplayInline(inlineDomId)) {
requestAnimationFrame(() => { requestAnimationFrame(() => tryAutoplayInline(inlineDomId))
tryAutoplayInline(inlineDomId)
})
} }
}) })
return return
} }
// 2) Fallback: inline normal starten (ohne exakten Seek)
startInline(k) startInline(k)
requestAnimationFrame(() => { requestAnimationFrame(() => {
if (!tryAutoplayInline(inlineDomId)) { if (!tryAutoplayInline(inlineDomId)) {
requestAnimationFrame(() => { requestAnimationFrame(() => tryAutoplayInline(inlineDomId))
tryAutoplayInline(inlineDomId)
})
} }
}) })
// 3) Optionaler Fallback auf bestehenden Handler (wenn du dort OpenPlayerAt machst)
// handleScrubberClickIndex(j, index, scrubberCount)
}} }}
stepSeconds={scrubberStepSeconds} stepSeconds={scrubberStepSeconds}
/> />

View File

@ -247,6 +247,8 @@ export default function FinishedDownloadsGalleryView({
// ✅ stashapp-artiger Hover-Scrubber-Zustand (pro Karte) // ✅ stashapp-artiger Hover-Scrubber-Zustand (pro Karte)
const [scrubIndexByKey, setScrubIndexByKey] = React.useState<Record<string, number | undefined>>({}) const [scrubIndexByKey, setScrubIndexByKey] = React.useState<Record<string, number | undefined>>({})
const [hoveredThumbKey, setHoveredThumbKey] = React.useState<string | null>(null)
const setScrubIndexForKey = React.useCallback((key: string, index: number | undefined) => { const setScrubIndexForKey = React.useCallback((key: string, index: number | undefined) => {
setScrubIndexByKey((prev) => { setScrubIndexByKey((prev) => {
if (index === undefined) { if (index === undefined) {
@ -464,8 +466,12 @@ export default function FinishedDownloadsGalleryView({
<div <div
className="group/thumb relative aspect-video rounded-t-lg bg-black/5 dark:bg-white/5" className="group/thumb relative aspect-video rounded-t-lg bg-black/5 dark:bg-white/5"
ref={registerTeaserHostIfNeeded(k)} ref={registerTeaserHostIfNeeded(k)}
onMouseEnter={() => onHoverPreviewKeyChange?.(k)} onMouseEnter={() => {
setHoveredThumbKey(k)
onHoverPreviewKeyChange?.(k)
}}
onMouseLeave={() => { onMouseLeave={() => {
setHoveredThumbKey((prev) => (prev === k ? null : prev))
onHoverPreviewKeyChange?.(null) onHoverPreviewKeyChange?.(null)
clearScrubIndex(k) clearScrubIndex(k)
setHoveredModelPreviewKey((prev) => (prev === k ? null : prev)) setHoveredModelPreviewKey((prev) => (prev === k ? null : prev))
@ -493,8 +499,6 @@ export default function FinishedDownloadsGalleryView({
} }
animatedMode="teaser" animatedMode="teaser"
animatedTrigger="always" animatedTrigger="always"
clipSeconds={1}
thumbSamples={18}
muted={previewMuted} muted={previewMuted}
popoverMuted={previewMuted} popoverMuted={previewMuted}
scrubProgressRatio={scrubProgressRatio} scrubProgressRatio={scrubProgressRatio}
@ -542,7 +546,7 @@ export default function FinishedDownloadsGalleryView({
) : null} ) : null}
{/* ✅ stashapp-artiger Hover-Scrubber (UI-only) */} {/* ✅ stashapp-artiger Hover-Scrubber (UI-only) */}
{hasScrubber ? ( {hasScrubber && hoveredThumbKey === k ? (
<div <div
className="absolute inset-x-0 bottom-0 z-30 pointer-events-none opacity-100 transition-opacity duration-150" className="absolute inset-x-0 bottom-0 z-30 pointer-events-none opacity-100 transition-opacity duration-150"
onClick={(e) => e.stopPropagation()} onClick={(e) => e.stopPropagation()}
@ -554,10 +558,7 @@ export default function FinishedDownloadsGalleryView({
activeIndex={activeScrubIndex} activeIndex={activeScrubIndex}
onActiveIndexChange={(idx) => setScrubIndexForKey(k, idx)} onActiveIndexChange={(idx) => setScrubIndexForKey(k, idx)}
onIndexClick={(index) => { onIndexClick={(index) => {
// optional: UI-Zustand direkt sichtbar halten
setScrubIndexForKey(k, index) setScrubIndexForKey(k, index)
// bestehender Handler (Parent entscheidet: openPlayerAt / modal / etc.)
handleScrubberClickIndex(j, index, scrubberCount) handleScrubberClickIndex(j, index, scrubberCount)
}} }}
stepSeconds={scrubberStepSeconds} stepSeconds={scrubberStepSeconds}

View File

@ -81,6 +81,10 @@ export type FinishedVideoPreviewProps = {
preferScrubProgress?: boolean preferScrubProgress?: boolean
} }
function baseName(path: string) {
return (path || '').split(/[\\/]/).pop() || ''
}
export default function FinishedVideoPreview({ export default function FinishedVideoPreview({
job, job,
getFileName, getFileName,
@ -121,7 +125,7 @@ export default function FinishedVideoPreview({
scrubProgressRatio, scrubProgressRatio,
preferScrubProgress = false, preferScrubProgress = false,
}: FinishedVideoPreviewProps) { }: FinishedVideoPreviewProps) {
const file = getFileName(job.output || '') const file = baseName(job.output || '') || getFileName(job.output || '')
const blurCls = blur ? 'blur-md' : '' const blurCls = blur ? 'blur-md' : ''
// ✅ meta robust normalisieren (job.meta kann string sein) // ✅ meta robust normalisieren (job.meta kann string sein)
@ -356,7 +360,7 @@ export default function FinishedVideoPreview({
const stripHot = (s: string) => (s.startsWith('HOT ') ? s.slice(4) : s) const stripHot = (s: string) => (s.startsWith('HOT ') ? s.slice(4) : s)
const previewId = useMemo(() => { const previewId = useMemo(() => {
const f = getFileName(job.output || '') const f = baseName(job.output || '') || getFileName(job.output || '')
if (!f) return '' if (!f) return ''
const base = f.replace(/\.[^.]+$/, '') // ext weg const base = f.replace(/\.[^.]+$/, '') // ext weg
return stripHot(base).trim() return stripHot(base).trim()

View File

@ -1,3 +1,5 @@
// frontend\src\components\ui\LiveHlsVideo.tsx
'use client' 'use client'
import { useEffect, useMemo, useRef, useState } from 'react' import { useEffect, useMemo, useRef, useState } from 'react'
@ -28,6 +30,8 @@ export default function LiveHlsVideo({
// ✅ manifestUrl ist stabil pro reloadKey // ✅ manifestUrl ist stabil pro reloadKey
const manifestUrl = useMemo(() => withNonce(src, reloadKey), [src, reloadKey]) const manifestUrl = useMemo(() => withNonce(src, reloadKey), [src, reloadKey])
const lastReloadAtRef = useRef(0)
useEffect(() => { useEffect(() => {
let cancelled = false let cancelled = false
let hls: Hls | null = null let hls: Hls | null = null
@ -52,8 +56,13 @@ export default function LiveHlsVideo({
const hardReload = () => { const hardReload = () => {
if (cancelled) return if (cancelled) return
const now = Date.now()
// ✅ verhindert Reload-Stürme (z.B. wenn hls.js kurz zickt)
if (now - lastReloadAtRef.current < 4000) return
lastReloadAtRef.current = now
cleanupTimers() cleanupTimers()
// ✅ Effect neu starten
setReloadKey((x) => x + 1) setReloadKey((x) => x + 1)
} }
@ -74,7 +83,7 @@ export default function LiveHlsVideo({
if (txt.includes('#EXTINF')) return { ok: true } if (txt.includes('#EXTINF')) return { ok: true }
} }
} catch {} } catch {}
await new Promise((res) => setTimeout(res, 500)) await new Promise((res) => setTimeout(res, 1200))
} }
// kein reason => "noch nicht ready" // kein reason => "noch nicht ready"
@ -108,7 +117,7 @@ export default function LiveHlsVideo({
video.src = manifestUrl video.src = manifestUrl
video.load() video.load()
video.play().catch(() => {}) video.play().catch((e) => console.debug('[LiveHlsVideo] play() failed', e))
// ---- Stall Handling (native) ---- // ---- Stall Handling (native) ----
let lastProgressTs = Date.now() let lastProgressTs = Date.now()
@ -154,9 +163,31 @@ export default function LiveHlsVideo({
} }
hls = new Hls({ hls = new Hls({
lowLatencyMode: true, lowLatencyMode: false,
liveSyncDurationCount: 2,
maxBufferLength: 8, // ✅ Live: nicht super-aggressiv hinterherlaufen
liveSyncDurationCount: 3,
liveMaxLatencyDurationCount: 10,
// Buffer
maxBufferLength: 12,
backBufferLength: 30,
// ✅ Netzwerk-Retry-Backoff (verhindert Request-Stürme)
manifestLoadingTimeOut: 8000,
manifestLoadingMaxRetry: 6,
manifestLoadingRetryDelay: 1000,
manifestLoadingMaxRetryTimeout: 8000,
levelLoadingTimeOut: 8000,
levelLoadingMaxRetry: 6,
levelLoadingRetryDelay: 1000,
levelLoadingMaxRetryTimeout: 8000,
fragLoadingTimeOut: 8000,
fragLoadingMaxRetry: 6,
fragLoadingRetryDelay: 1000,
fragLoadingMaxRetryTimeout: 8000,
}) })
hls.on(Hls.Events.ERROR, (_evt, data) => { hls.on(Hls.Events.ERROR, (_evt, data) => {
@ -180,7 +211,7 @@ export default function LiveHlsVideo({
hls.attachMedia(video) hls.attachMedia(video)
hls.on(Hls.Events.MANIFEST_PARSED, () => { hls.on(Hls.Events.MANIFEST_PARSED, () => {
video.play().catch(() => {}) video.play().catch((e) => console.debug('[LiveHlsVideo] play() failed', e))
}) })
} }

File diff suppressed because it is too large Load Diff

View File

@ -42,7 +42,7 @@ export default function ModelPreview({
thumbsCandidates, thumbsCandidates,
}: Props) { }: Props) {
const blurCls = blur ? 'blur-md' : '' const blurCls = blur ? 'blur-md' : ''
const CONTROLBAR_H = 30 const CONTROLBAR_H = 0
const rootRef = useRef<HTMLDivElement | null>(null) const rootRef = useRef<HTMLDivElement | null>(null)
@ -269,7 +269,7 @@ export default function ModelPreview({
}} }}
> >
<div className="absolute inset-0"> <div className="absolute inset-0">
<LiveHlsVideo src={hq} muted={false} className="w-full h-full object-contain object-bottom relative z-0" /> <LiveHlsVideo src={hq} muted={true} className="w-full h-full object-contain object-bottom relative z-0" />
<div className="absolute left-2 top-2 inline-flex items-center gap-1.5 rounded-full bg-red-600/90 px-2 py-1 text-[11px] font-semibold text-white shadow-sm"> <div className="absolute left-2 top-2 inline-flex items-center gap-1.5 rounded-full bg-red-600/90 px-2 py-1 text-[11px] font-semibold text-white shadow-sm">
<span className="inline-block size-1.5 rounded-full bg-white animate-pulse" /> <span className="inline-block size-1.5 rounded-full bg-white animate-pulse" />

View File

@ -170,7 +170,7 @@ export default function Pagination({
return ( return (
<div <div
className={clsx( className={clsx(
'flex items-center justify-between border-t border-gray-200 bg-white px-4 py-3 sm:px-6 dark:border-white/10 dark:bg-transparent', 'flex items-center justify-between bg-white dark:border-white/10 dark:bg-transparent',
className className
)} )}
> >
@ -198,7 +198,7 @@ export default function Pagination({
<div className="hidden sm:flex sm:flex-1 sm:items-center sm:justify-between"> <div className="hidden sm:flex sm:flex-1 sm:items-center sm:justify-between">
<div> <div>
{showSummary ? ( {showSummary ? (
<p className="text-sm text-gray-700 dark:text-gray-300"> <p className="text-sm text-gray-700 dark:text-gray-300 px-2">
Showing <span className="font-medium">{from}</span> to{' '} Showing <span className="font-medium">{from}</span> to{' '}
<span className="font-medium">{to}</span> of{' '} <span className="font-medium">{to}</span> of{' '}
<span className="font-medium">{totalItems}</span> results <span className="font-medium">{totalItems}</span> results

View File

@ -18,6 +18,7 @@ export default defineConfig({
target: "http://localhost:9999", target: "http://localhost:9999",
changeOrigin: true, changeOrigin: true,
secure: false, secure: false,
ws: true
}, },
}, },
} }