This commit is contained in:
Linrador 2026-02-12 11:33:21 +01:00
parent 76ea79a1a9
commit 97eafb10e7
28 changed files with 3398 additions and 1622 deletions

View File

@ -1,3 +1,5 @@
// backend\chaturbate_autostart.go
package main package main
import ( import (
@ -45,6 +47,7 @@ func cookieHeaderFromSettings(s RecorderSettings) string {
if err != nil || len(m) == 0 { if err != nil || len(m) == 0 {
return "" return ""
} }
keys := make([]string, 0, len(m)) keys := make([]string, 0, len(m))
for k := range m { for k := range m {
keys = append(keys, k) keys = append(keys, k)
@ -52,18 +55,25 @@ func cookieHeaderFromSettings(s RecorderSettings) string {
sort.Strings(keys) sort.Strings(keys)
var b strings.Builder var b strings.Builder
for i, k := range keys { first := true
for _, k := range keys {
v := strings.TrimSpace(m[k]) v := strings.TrimSpace(m[k])
k = strings.TrimSpace(k)
if k == "" || v == "" { if k == "" || v == "" {
continue continue
} }
if i > 0 {
if !first {
b.WriteString("; ") b.WriteString("; ")
} }
first = false
b.WriteString(k) b.WriteString(k)
b.WriteString("=") b.WriteString("=")
b.WriteString(v) b.WriteString(v)
} }
return b.String() return b.String()
} }
@ -82,7 +92,9 @@ func resolveChaturbateURL(m WatchedModelLite) string {
// Startet watched+online(public) automatisch unabhängig vom Frontend // Startet watched+online(public) automatisch unabhängig vom Frontend
func startChaturbateAutoStartWorker(store *ModelStore) { func startChaturbateAutoStartWorker(store *ModelStore) {
if store == nil { if store == nil {
fmt.Println("⚠️ [autostart] model store is nil") if verboseLogs() {
fmt.Println("⚠️ [autostart] model store is nil")
}
return return
} }
@ -213,9 +225,13 @@ func startChaturbateAutoStartWorker(store *ModelStore) {
Cookie: cookieHdr, Cookie: cookieHdr,
}) })
if err != nil { if err != nil {
fmt.Println("❌ [autostart] start failed:", it.url, err) if verboseLogs() {
fmt.Println("❌ [autostart] start failed:", it.url, err)
}
} else { } else {
fmt.Println("▶️ [autostart] started:", it.url) if verboseLogs() {
fmt.Println("▶️ [autostart] started:", it.url)
}
lastStart = time.Now() lastStart = time.Now()
} }
} }

View File

@ -261,7 +261,7 @@ func startChaturbateOnlinePoller(store *ModelStore) {
fmt.Println("✅ [chaturbate] online rooms fetch recovered") fmt.Println("✅ [chaturbate] online rooms fetch recovered")
lastLoggedErr = "" lastLoggedErr = ""
} }
if len(rooms) != lastLoggedCount { if verboseLogs() && len(rooms) != lastLoggedCount {
fmt.Println("✅ [chaturbate] online rooms:", len(rooms)) fmt.Println("✅ [chaturbate] online rooms:", len(rooms))
lastLoggedCount = len(rooms) lastLoggedCount = len(rooms)
} }

Binary file not shown.

Binary file not shown.

99
backend/log_policy.go Normal file
View File

@ -0,0 +1,99 @@
// backend/log_policy.go
package main
import (
"context"
"errors"
"os"
"strings"
)
// Optional: Verbose nur wenn du es explizit willst (z.B. beim Debuggen)
func verboseLogs() bool {
return os.Getenv("REC_VERBOSE") == "1"
}
// shouldLogRecordError decides whether a recording error is worth logging.
//
// Policy, in priority order:
//  1. nil / context.Canceled (normal STOP) -> never log
//  2. Chaturbate cookie/auth/Cloudflare problems -> always log (even Hidden jobs)
//  3. hard config/IO errors (recordDir, permissions, read-only) -> always log
//  4. expected "provider offline / unsupported" situations -> never log
//  5. ffmpeg errors -> log only for manually started (non-Hidden) jobs
//  6. default: Hidden jobs stay quiet, manual jobs log
func shouldLogRecordError(err error, provider string, req RecordRequest) bool {
	if err == nil {
		return false
	}
	// "STOP" / cancel is the normal shutdown path -> no error log
	if errors.Is(err, context.Canceled) {
		return false
	}
	msg := strings.ToLower(err.Error())
	// --- Chaturbate: cookie/auth/Cloudflare problems are ALWAYS logged (even when Hidden) ---
	if provider == "chaturbate" {
		// the explicit cookie error message
		if strings.Contains(msg, "cf_clearance") && strings.Contains(msg, "cookie") {
			return true
		}
		// Typical auth/Cloudflare/blocker indicators.
		// BUGFIX: match the Cloudflare token prefixes "cf-"/"cf_" (cf-ray,
		// cf_clearance, cf-cache-status, ...) instead of the bare substring
		// "cf", which also matched unrelated text such as "cfg" or arbitrary
		// file paths and force-logged harmless errors.
		if strings.Contains(msg, "403") || strings.Contains(msg, "401") ||
			strings.Contains(msg, "cloudflare") ||
			strings.Contains(msg, "cf-") || strings.Contains(msg, "cf_") ||
			strings.Contains(msg, "captcha") || strings.Contains(msg, "forbidden") {
			return true
		}
	}
	// --- hard config/IO errors: always log ---
	if strings.Contains(msg, "recorddir") ||
		strings.Contains(msg, "auflösung fehlgeschlagen") ||
		strings.Contains(msg, "permission") ||
		strings.Contains(msg, "access is denied") ||
		strings.Contains(msg, "read-only") {
		return true
	}
	// --- expected "provider/offline" situations: NEVER log ---
	// unsupported provider
	if strings.Contains(msg, "unsupported provider") {
		return false
	}
	// Chaturbate offline / parse errors / end of watch segments
	if strings.Contains(msg, "kein hls") ||
		strings.Contains(msg, "room dossier") ||
		strings.Contains(msg, "keine neuen hls-segmente") ||
		strings.Contains(msg, "playlist nicht mehr erreichbar") ||
		strings.Contains(msg, "möglicherweise offline") ||
		strings.Contains(msg, "stream vermutlich offline") {
		return false
	}
	// MFC: not public / offline / private / does not exist
	if strings.Contains(msg, "mfc: stream wurde nicht public") ||
		strings.Contains(msg, "mfc: stream ist nicht public") ||
		strings.Contains(msg, "stream ist nicht öffentlich") ||
		strings.Contains(msg, "status: offline") ||
		strings.Contains(msg, "status: private") ||
		strings.Contains(msg, "status: notexist") {
		return false
	}
	// ffmpeg errors:
	// - Hidden (autostart/auto-check) jobs are usually "offline/short-lived" -> quiet
	// - manually started jobs -> worth logging
	if strings.Contains(msg, "ffmpeg") {
		return !req.Hidden
	}
	// Default:
	// - Hidden jobs stay quiet
	// - manual jobs may log (but never the "offline"/"expected" cases above)
	return !req.Hidden
}
// shouldLogRecordInfo reports whether informational (non-error) record logs
// should be emitted for the given request. Default is quiet (like the
// auto-delete logs); set REC_VERBOSE=1 to enable them.
// The request is accepted for interface symmetry with shouldLogRecordError
// but is currently not consulted.
func shouldLogRecordInfo(req RecordRequest) bool {
	return verboseLogs()
}

View File

@ -39,7 +39,6 @@ import (
"github.com/grafov/m3u8" "github.com/grafov/m3u8"
gocpu "github.com/shirou/gopsutil/v3/cpu" gocpu "github.com/shirou/gopsutil/v3/cpu"
godisk "github.com/shirou/gopsutil/v3/disk" godisk "github.com/shirou/gopsutil/v3/disk"
"github.com/sqweek/dialog"
"golang.org/x/image/font" "golang.org/x/image/font"
"golang.org/x/image/font/basicfont" "golang.org/x/image/font/basicfont"
"golang.org/x/image/math/fixed" "golang.org/x/image/math/fixed"
@ -69,6 +68,7 @@ type RecordJob struct {
VideoWidth int `json:"videoWidth,omitempty"` VideoWidth int `json:"videoWidth,omitempty"`
VideoHeight int `json:"videoHeight,omitempty"` VideoHeight int `json:"videoHeight,omitempty"`
FPS float64 `json:"fps,omitempty"` FPS float64 `json:"fps,omitempty"`
Meta *videoMeta `json:"meta,omitempty"`
Hidden bool `json:"-"` Hidden bool `json:"-"`
@ -184,6 +184,163 @@ func probeVideoProps(ctx context.Context, filePath string) (w int, h int, fps fl
return w, h, fps, nil return w, h, fps, nil
} }
// metaJSONPathForAssetID returns the path of the meta.json file that belongs
// to the given asset ID, located under the generated/meta root directory.
// It fails when the root cannot be resolved or resolves to an empty string.
func metaJSONPathForAssetID(assetID string) (string, error) {
	base, err := generatedMetaRoot()
	if err != nil {
		return "", err
	}
	if strings.TrimSpace(base) == "" {
		return "", fmt.Errorf("generated/meta root leer")
	}
	p := filepath.Join(base, assetID, "meta.json")
	return p, nil
}
// readVideoMetaIfValid loads meta.json from metaPath and returns it only when
// it still matches the video file described by fi (same size and mtime) and
// passes minimal validation (positive duration). Stale or unreadable meta is
// rejected so callers never display outdated values.
func readVideoMetaIfValid(metaPath string, fi os.FileInfo) (*videoMeta, bool) {
	raw, err := os.ReadFile(metaPath)
	if err != nil || len(raw) == 0 {
		return nil, false
	}
	var meta videoMeta
	if json.Unmarshal(raw, &meta) != nil {
		return nil, false
	}
	// only accept when the file is identical (guards against stale values)
	stale := meta.FileSize != fi.Size() || meta.FileModUnix != fi.ModTime().Unix()
	if stale {
		return nil, false
	}
	// minimal sanity check
	if meta.DurationSeconds <= 0 {
		return nil, false
	}
	return &meta, true
}
// ensureVideoMetaForFile returns the cached meta.json for the video at
// fullPath, or (re)generates it when missing or stale. fi must describe
// fullPath; sourceURL is stored for provenance. Probing is bounded by an
// 8-second timeout and, when durSem is set, a global concurrency limit.
// Returns (meta, true) on success, (nil, false) on any failure.
func ensureVideoMetaForFile(ctx context.Context, fullPath string, fi os.FileInfo, sourceURL string) (*videoMeta, bool) {
	// derive the asset ID from the file name (stem without extension)
	stem := strings.TrimSuffix(filepath.Base(fullPath), filepath.Ext(fullPath))
	assetID := stripHotPrefix(strings.TrimSpace(stem))
	if assetID == "" {
		return nil, false
	}
	// sanitize the same way the generated/<id> folders are named
	var err error
	assetID, err = sanitizeID(assetID)
	if err != nil || assetID == "" {
		return nil, false
	}
	metaPath, err := metaJSONPathForAssetID(assetID)
	if err != nil {
		return nil, false
	}
	// 1) is a valid (non-stale) meta already on disk?
	if m, ok := readVideoMetaIfValid(metaPath, fi); ok {
		return m, true
	}
	// 2) otherwise regenerate (bounded by timeout + concurrency limit)
	if ctx == nil {
		ctx = context.Background()
	}
	cctx, cancel := context.WithTimeout(ctx, 8*time.Second)
	defer cancel()
	if durSem != nil {
		if err := durSem.Acquire(cctx); err != nil {
			return nil, false
		}
		defer durSem.Release()
	}
	// duration (cached probe)
	dur, derr := durationSecondsCached(cctx, fullPath)
	if derr != nil || dur <= 0 {
		return nil, false
	}
	// video properties (best effort)
	w, h, fps, perr := probeVideoProps(cctx, fullPath)
	if perr != nil {
		// width/height/fps may stay 0; the duration alone is still useful
		w, h, fps = 0, 0, 0
	}
	// create the meta directory (best effort)
	_ = os.MkdirAll(filepath.Dir(metaPath), 0o755)
	m := &videoMeta{
		Version: 2,
		DurationSeconds: dur,
		FileSize: fi.Size(),
		FileModUnix: fi.ModTime().Unix(),
		VideoWidth: w,
		VideoHeight: h,
		FPS: fps,
		Resolution: formatResolution(w, h),
		SourceURL: strings.TrimSpace(sourceURL),
		UpdatedAtUnix: time.Now().Unix(),
	}
	b, _ := json.MarshalIndent(m, "", " ")
	b = append(b, '\n')
	_ = atomicWriteFile(metaPath, b) // best effort
	return m, true
}
// ensureVideoMetaForFileBestEffort obtains meta for the video at fullPath:
// it first tries the real generation path (ffprobe/ffmpeg) via
// ensureVideoMetaForFile; if that fails but durationSecondsCacheOnly already
// knows a duration, it persists a duration-only meta.json so future lookups
// can be answered from disk. Returns (nil, false) when nothing usable exists.
func ensureVideoMetaForFileBestEffort(ctx context.Context, fullPath string, sourceURL string) (*videoMeta, bool) {
	path := strings.TrimSpace(fullPath)
	if path == "" {
		return nil, false
	}
	info, statErr := os.Stat(path)
	if statErr != nil || info == nil || info.IsDir() || info.Size() <= 0 {
		return nil, false
	}
	// 1) preferred path: generate/read real meta (ffprobe/ffmpeg)
	if meta, ok := ensureVideoMetaForFile(ctx, path, info, sourceURL); ok && meta != nil {
		return meta, true
	}
	// 2) fallback: persist a duration-only meta.json from the in-RAM cache
	cachedDur := durationSecondsCacheOnly(path, info)
	if cachedDur <= 0 {
		return nil, false
	}
	base := filepath.Base(path)
	stem := strings.TrimSuffix(base, filepath.Ext(base))
	id := stripHotPrefix(strings.TrimSpace(stem))
	if id == "" {
		return nil, false
	}
	metaPath, pathErr := metaJSONPathForAssetID(id)
	if pathErr != nil || strings.TrimSpace(metaPath) == "" {
		return nil, false
	}
	_ = os.MkdirAll(filepath.Dir(metaPath), 0o755)
	_ = writeVideoMetaDuration(metaPath, info, cachedDur, sourceURL)
	// re-read and validate what was just written
	if meta, ok := readVideoMetaIfValid(metaPath, info); ok && meta != nil {
		return meta, true
	}
	return nil, false
}
func (d *dummyResponseWriter) Header() http.Header { func (d *dummyResponseWriter) Header() http.Header {
if d.h == nil { if d.h == nil {
d.h = make(http.Header) d.h = make(http.Header)
@ -1373,72 +1530,6 @@ func durationSecondsCached(ctx context.Context, path string) (float64, error) {
return sec, nil return sec, nil
} }
// RecorderSettings is the persisted application configuration
// (recorder_settings.json); read/written via recordSettingsHandler.
type RecorderSettings struct {
	RecordDir string `json:"recordDir"` // directory for in-progress recordings
	DoneDir string `json:"doneDir"` // directory finished recordings end up in
	FFmpegPath string `json:"ffmpegPath"` // explicit ffmpeg binary override (empty = auto-detect)
	AutoAddToDownloadList bool `json:"autoAddToDownloadList"`
	AutoStartAddedDownloads bool `json:"autoStartAddedDownloads"`
	UseChaturbateAPI bool `json:"useChaturbateApi"`
	UseMyFreeCamsWatcher bool `json:"useMyFreeCamsWatcher"`
	// When enabled, finished downloads smaller than the threshold below are deleted automatically.
	AutoDeleteSmallDownloads bool `json:"autoDeleteSmallDownloads"`
	AutoDeleteSmallDownloadsBelowMB int `json:"autoDeleteSmallDownloadsBelowMB"`
	BlurPreviews bool `json:"blurPreviews"`
	TeaserPlayback string `json:"teaserPlayback"` // still | hover | all
	TeaserAudio bool `json:"teaserAudio"` // play previews/teasers with sound
	// EncryptedCookies contains base64(nonce+ciphertext) of a JSON cookie map.
	EncryptedCookies string `json:"encryptedCookies"`
}
var (
	// settingsMu guards every read/write of the package-level settings value.
	settingsMu sync.Mutex
	// settings holds the current configuration; these literals are the
	// defaults used until loadSettings overrides them from disk.
	settings = RecorderSettings{
		RecordDir: "/records",
		DoneDir: "/records/done",
		FFmpegPath: "",
		AutoAddToDownloadList: false,
		AutoStartAddedDownloads: false,
		UseChaturbateAPI: false,
		UseMyFreeCamsWatcher: false,
		AutoDeleteSmallDownloads: false,
		AutoDeleteSmallDownloadsBelowMB: 50,
		BlurPreviews: false,
		TeaserPlayback: "hover",
		TeaserAudio: false,
		EncryptedCookies: "",
	}
	// settingsFile is the default settings filename; overridable via the
	// RECORDER_SETTINGS_FILE environment variable (see settingsFilePath).
	settingsFile = "recorder_settings.json"
)
// settingsFilePath resolves the on-disk location of the recorder settings
// file. The RECORDER_SETTINGS_FILE env var overrides the default file name;
// the name is then resolved relative to the exe/app directory when possible
// (falling back to the working directory, e.g. under `go run`).
func settingsFilePath() string {
	name := strings.TrimSpace(os.Getenv("RECORDER_SETTINGS_FILE"))
	if name == "" {
		name = settingsFile
	}
	resolved, err := resolvePathRelativeToApp(name)
	if err == nil && strings.TrimSpace(resolved) != "" {
		return resolved
	}
	// fallback: return the name unchanged
	return name
}
// getSettings returns a snapshot copy of the current recorder settings,
// taken under settingsMu so concurrent updates cannot tear the struct.
func getSettings() RecorderSettings {
	settingsMu.Lock()
	snapshot := settings
	settingsMu.Unlock()
	return snapshot
}
func detectFFmpegPath() string { func detectFFmpegPath() string {
// 0. Settings-Override (ffmpegPath in recorder_settings.json / UI) // 0. Settings-Override (ffmpegPath in recorder_settings.json / UI)
s := getSettings() s := getSettings()
@ -1569,246 +1660,6 @@ func renameGenerated(oldID, newID string) {
} }
} }
// loadSettings reads the settings file (path via settingsFilePath), merges it
// over the built-in defaults, normalizes/clamps the values, ensures the
// record/done directories exist, and re-detects ffmpeg/ffprobe. Unreadable
// or invalid JSON silently keeps the defaults.
func loadSettings() {
	p := settingsFilePath()
	b, err := os.ReadFile(p)
	fmt.Println("🔧 settingsFile:", p)
	if err == nil {
		s := getSettings() // start from the defaults so keys missing in JSON keep them
		if json.Unmarshal(b, &s) == nil {
			if strings.TrimSpace(s.RecordDir) != "" {
				s.RecordDir = filepath.Clean(strings.TrimSpace(s.RecordDir))
			}
			if strings.TrimSpace(s.DoneDir) != "" {
				s.DoneDir = filepath.Clean(strings.TrimSpace(s.DoneDir))
			}
			if strings.TrimSpace(s.FFmpegPath) != "" {
				s.FFmpegPath = strings.TrimSpace(s.FFmpegPath)
			}
			// TeaserPlayback: normalize and fall back to "hover" on anything invalid
			s.TeaserPlayback = strings.ToLower(strings.TrimSpace(s.TeaserPlayback))
			if s.TeaserPlayback == "" {
				s.TeaserPlayback = "hover"
			}
			if s.TeaserPlayback != "still" && s.TeaserPlayback != "hover" && s.TeaserPlayback != "all" {
				s.TeaserPlayback = "hover"
			}
			// auto-delete threshold: clamp to [0, 100000] MB
			if s.AutoDeleteSmallDownloadsBelowMB < 0 {
				s.AutoDeleteSmallDownloadsBelowMB = 0
			}
			if s.AutoDeleteSmallDownloadsBelowMB > 100_000 {
				s.AutoDeleteSmallDownloadsBelowMB = 100_000
			}
			settingsMu.Lock()
			settings = s
			settingsMu.Unlock()
		}
	}
	// make sure the record/done directories exist (best effort)
	s := getSettings()
	recordAbs, _ := resolvePathRelativeToApp(s.RecordDir)
	doneAbs, _ := resolvePathRelativeToApp(s.DoneDir)
	if strings.TrimSpace(recordAbs) != "" {
		_ = os.MkdirAll(recordAbs, 0o755)
	}
	if strings.TrimSpace(doneAbs) != "" {
		_ = os.MkdirAll(doneAbs, 0o755)
	}
	// resolve the ffmpeg/ffprobe binaries from settings/env/PATH
	ffmpegPath = detectFFmpegPath()
	fmt.Println("🔍 ffmpegPath:", ffmpegPath)
	ffprobePath = detectFFprobePath()
	fmt.Println("🔍 ffprobePath:", ffprobePath)
}
// saveSettingsToDisk persists the current in-memory settings as indented JSON
// (atomically written) to the settings file. Failures are logged, never fatal.
func saveSettingsToDisk() {
	current := getSettings()
	data, merr := json.MarshalIndent(current, "", " ")
	if merr != nil {
		fmt.Println("⚠️ settings marshal:", merr)
		return
	}
	data = append(data, '\n')
	target := settingsFilePath()
	if werr := atomicWriteFile(target, data); werr != nil {
		fmt.Println("⚠️ settings write:", werr)
		return
	}
	// optional
	// fmt.Println("✅ settings saved:", target)
}
// recordSettingsHandler serves the settings endpoint.
//
//	GET  — returns the current RecorderSettings as JSON (no-store).
//	POST — validates + normalizes the posted settings, ensures the target
//	       directories exist, updates RAM + disk, re-detects ffmpeg/ffprobe,
//	       and echoes the stored settings back.
//
// Any other method gets 405.
func recordSettingsHandler(w http.ResponseWriter, r *http.Request) {
	switch r.Method {
	case http.MethodGet:
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("Cache-Control", "no-store")
		_ = json.NewEncoder(w).Encode(getSettings())
		return
	case http.MethodPost:
		var in RecorderSettings
		if err := json.NewDecoder(r.Body).Decode(&in); err != nil {
			http.Error(w, "invalid json: "+err.Error(), http.StatusBadRequest)
			return
		}
		// --- normalize (IMPORTANT: trim first, then empty-check, then clean) ---
		recRaw := strings.TrimSpace(in.RecordDir)
		doneRaw := strings.TrimSpace(in.DoneDir)
		if recRaw == "" || doneRaw == "" {
			http.Error(w, "recordDir und doneDir dürfen nicht leer sein", http.StatusBadRequest)
			return
		}
		in.RecordDir = filepath.Clean(recRaw)
		in.DoneDir = filepath.Clean(doneRaw)
		// Optional but strongly recommended: forbid "." as a target
		if in.RecordDir == "." || in.DoneDir == "." {
			http.Error(w, "recordDir/doneDir dürfen nicht '.' sein", http.StatusBadRequest)
			return
		}
		in.FFmpegPath = strings.TrimSpace(in.FFmpegPath)
		// TeaserPlayback: normalize, fall back to "hover" on anything invalid
		in.TeaserPlayback = strings.ToLower(strings.TrimSpace(in.TeaserPlayback))
		if in.TeaserPlayback == "" {
			in.TeaserPlayback = "hover"
		}
		if in.TeaserPlayback != "still" && in.TeaserPlayback != "hover" && in.TeaserPlayback != "all" {
			in.TeaserPlayback = "hover"
		}
		// auto-delete threshold: clamp to [0, 100000] MB
		if in.AutoDeleteSmallDownloadsBelowMB < 0 {
			in.AutoDeleteSmallDownloadsBelowMB = 0
		}
		if in.AutoDeleteSmallDownloadsBelowMB > 100_000 {
			in.AutoDeleteSmallDownloadsBelowMB = 100_000
		}
		// --- ensure folders (report errors back, e.g. missing permissions) ---
		recAbs, err := resolvePathRelativeToApp(in.RecordDir)
		if err != nil {
			http.Error(w, "ungültiger recordDir: "+err.Error(), http.StatusBadRequest)
			return
		}
		doneAbs, err := resolvePathRelativeToApp(in.DoneDir)
		if err != nil {
			http.Error(w, "ungültiger doneDir: "+err.Error(), http.StatusBadRequest)
			return
		}
		if err := os.MkdirAll(recAbs, 0o755); err != nil {
			http.Error(w, "konnte recordDir nicht erstellen: "+err.Error(), http.StatusBadRequest)
			return
		}
		if err := os.MkdirAll(doneAbs, 0o755); err != nil {
			http.Error(w, "konnte doneDir nicht erstellen: "+err.Error(), http.StatusBadRequest)
			return
		}
		// update the in-memory settings
		settingsMu.Lock()
		settings = in
		settingsMu.Unlock()
		// persist the settings to disk
		saveSettingsToDisk()
		// re-resolve ffmpeg/ffprobe after the change;
		// an explicitly set FFmpegPath is used verbatim
		if strings.TrimSpace(in.FFmpegPath) != "" {
			ffmpegPath = in.FFmpegPath
		} else {
			ffmpegPath = detectFFmpegPath()
		}
		fmt.Println("🔍 ffmpegPath:", ffmpegPath)
		ffprobePath = detectFFprobePath()
		fmt.Println("🔍 ffprobePath:", ffprobePath)
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("Cache-Control", "no-store")
		_ = json.NewEncoder(w).Encode(getSettings())
		return
	default:
		http.Error(w, "Nur GET/POST erlaubt", http.StatusMethodNotAllowed)
		return
	}
}
// settingsBrowse opens a native picker for the "record", "done" or "ffmpeg"
// target and returns the chosen path as JSON {"path": ...}. A cancelled
// dialog yields 204 No Content so the frontend can simply ignore it.
// NOTE(review): depends on the github.com/sqweek/dialog package; the dialog
// blocks the request until the user responds.
func settingsBrowse(w http.ResponseWriter, r *http.Request) {
	target := r.URL.Query().Get("target")
	if target != "record" && target != "done" && target != "ffmpeg" {
		http.Error(w, "target muss record, done oder ffmpeg sein", http.StatusBadRequest)
		return
	}
	var (
		p string
		err error
	)
	if target == "ffmpeg" {
		// file picker for the ffmpeg.exe binary
		p, err = dialog.File().
			Title("ffmpeg.exe auswählen").
			Load()
	} else {
		// directory picker for record/done
		p, err = dialog.Directory().
			Title("Ordner auswählen").
			Browse()
	}
	if err != nil {
		// user cancelled → 204 No Content is convenient for the frontend
		if strings.Contains(strings.ToLower(err.Error()), "cancel") {
			w.WriteHeader(http.StatusNoContent)
			return
		}
		http.Error(w, "auswahl fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// optional: if the path lies inside the exe dir, return it RELATIVE
	p = maybeMakeRelativeToExe(p)
	w.Header().Set("Content-Type", "application/json")
	_ = json.NewEncoder(w).Encode(map[string]string{"path": p})
}
func maybeMakeRelativeToExe(abs string) string {
exe, err := os.Executable()
if err != nil {
return abs
}
base := filepath.Dir(exe)
rel, err := filepath.Rel(base, abs)
if err != nil {
return abs
}
// wenn rel mit ".." beginnt -> nicht innerhalb base -> absoluten Pfad behalten
if rel == "." || rel == ".." || strings.HasPrefix(rel, ".."+string(os.PathSeparator)) {
return abs
}
return filepath.ToSlash(rel) // frontend-freundlich
}
// --- Gemeinsame Status-Werte für MFC --- // --- Gemeinsame Status-Werte für MFC ---
type Status int type Status int

Binary file not shown.

View File

@ -3,12 +3,13 @@
package main package main
import ( import (
"bytes"
"context" "context"
"encoding/base64" "encoding/base64"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"io" "io"
"net"
"net/http" "net/http"
"net/url" "net/url"
"os" "os"
@ -21,6 +22,7 @@ import (
"strings" "strings"
"sync" "sync"
"sync/atomic" "sync/atomic"
"syscall"
"time" "time"
) )
@ -209,241 +211,413 @@ func startRecordingFromRequest(w http.ResponseWriter, r *http.Request) {
_ = json.NewEncoder(w).Encode(job) _ = json.NewEncoder(w).Encode(job)
} }
func recordVideo(w http.ResponseWriter, r *http.Request) { // ---- track if headers/body were already written ----
// rwTrack wraps an http.ResponseWriter and records whether headers or body
// bytes were already sent, so late errors can avoid a second WriteHeader
// ("superfluous response.WriteHeader"). Methods must live at package scope,
// hence the named type.
type rwTrack struct {
	http.ResponseWriter
	wrote bool
}

// WriteHeader forwards the status code exactly once; later calls are ignored.
func (t *rwTrack) WriteHeader(statusCode int) {
	if t.wrote {
		return
	}
	t.wrote = true
	t.ResponseWriter.WriteHeader(statusCode)
}

// Write marks the response as started and passes the bytes through.
func (t *rwTrack) Write(p []byte) (int, error) {
	t.wrote = true
	return t.ResponseWriter.Write(p)
}
func recordVideo(w http.ResponseWriter, r *http.Request) {
// ---- wrap writer to detect "already wrote" ----
tw := &rwTrack{ResponseWriter: w}
w = tw
writeErr := func(code int, msg string) {
// Wenn schon Header/Body raus sind, dürfen wir KEIN http.Error mehr machen,
// sonst gibt's "superfluous response.WriteHeader".
if tw.wrote {
fmt.Println("[recordVideo] late error (headers already sent):", code, msg)
return
}
http.Error(w, msg, code) // nutzt WriteHeader+Write -> tw.wrote wird automatisch true
}
writeStatus := func(code int) {
if tw.wrote {
return
}
w.WriteHeader(code) // geht durch rwTrack.WriteHeader
}
// ---- CORS ----
origin := r.Header.Get("Origin") origin := r.Header.Get("Origin")
if origin != "" { if origin != "" {
// ✅ dev origin erlauben (oder "*" wenns dir egal ist)
w.Header().Set("Access-Control-Allow-Origin", origin) w.Header().Set("Access-Control-Allow-Origin", origin)
w.Header().Set("Vary", "Origin") w.Header().Set("Vary", "Origin")
w.Header().Set("Access-Control-Allow-Methods", "GET,HEAD,OPTIONS") w.Header().Set("Access-Control-Allow-Methods", "GET,HEAD,OPTIONS")
w.Header().Set("Access-Control-Allow-Headers", "Range") // Wichtig: Browser schicken bei Video-Range-Requests oft If-Range / If-Modified-Since / If-None-Match.
w.Header().Set("Access-Control-Expose-Headers", "Content-Length, Content-Range, Accept-Ranges") // Wenn du die nicht erlaubst, schlägt der Preflight fehl -> VideoJS sieht "NETWORK error".
w.Header().Set("Access-Control-Allow-Headers", "Range, If-Range, If-Modified-Since, If-None-Match")
w.Header().Set("Access-Control-Expose-Headers", "Content-Length, Content-Range, Accept-Ranges, ETag, Last-Modified, X-Transcode-Offset-Seconds")
w.Header().Set("Access-Control-Allow-Credentials", "true")
} }
if r.Method == http.MethodOptions { if r.Method == http.MethodOptions {
w.WriteHeader(http.StatusNoContent) writeStatus(http.StatusNoContent)
return return
} }
// ✅ einmal lesen (für beide Zweige) + normalisieren // ---- query normalize ----
q := strings.TrimSpace(r.URL.Query().Get("quality")) // Neu: resolution=LOW|MEDIUM|HIGH|ORIGINAL
if strings.EqualFold(q, "auto") { res := strings.TrimSpace(r.URL.Query().Get("resolution"))
q = ""
// Backwards-Compat: falls altes Frontend noch quality nutzt
if res == "" {
res = strings.TrimSpace(r.URL.Query().Get("quality"))
} }
if q != "" {
// früh validieren (liefert sauberen 400 statt später 500) // Normalize: auto/original => leer (== "ORIGINAL" Profil)
if _, ok := profileFromQuality(q); !ok { if strings.EqualFold(res, "auto") || strings.EqualFold(res, "original") {
http.Error(w, "ungültige quality", http.StatusBadRequest) res = ""
}
// Validieren (wenn gesetzt)
if res != "" {
if _, ok := profileFromResolution(res); !ok {
writeErr(http.StatusBadRequest, "ungültige resolution")
return return
} }
} }
fmt.Println("[recordVideo] quality="+q, "file="+r.URL.Query().Get("file"), "id="+r.URL.Query().Get("id")) rawProgress := strings.TrimSpace(r.URL.Query().Get("progress"))
if rawProgress == "" {
rawProgress = strings.TrimSpace(r.URL.Query().Get("p"))
}
// ✅ Wiedergabe über Dateiname (für doneDir / recordDir) // ---- startSec parse (seek position in seconds) ----
if raw := strings.TrimSpace(r.URL.Query().Get("file")); raw != "" { startSec := 0
// explizit decoden (zur Sicherheit) startFrac := -1.0 // wenn 0..1 => Progress-Fraction (currentProgress)
file, err := url.QueryUnescape(raw)
if err != nil {
http.Error(w, "ungültiger file", http.StatusBadRequest)
return
}
file = strings.TrimSpace(file)
// kein Pfad, keine Backslashes, kein Traversal raw := strings.TrimSpace(r.URL.Query().Get("start"))
if file == "" || if raw == "" {
strings.Contains(file, "/") || raw = strings.TrimSpace(r.URL.Query().Get("t"))
strings.Contains(file, "\\") || }
filepath.Base(file) != file {
http.Error(w, "ungültiger file", http.StatusBadRequest) parseFracOrSeconds := func(s string) {
s = strings.TrimSpace(s)
if s == "" {
return return
} }
ext := strings.ToLower(filepath.Ext(file)) // allow "hh:mm:ss" / "mm:ss"
if ext != ".mp4" && ext != ".ts" { if strings.Contains(s, ":") {
http.Error(w, "nicht erlaubt", http.StatusForbidden) parts := strings.Split(s, ":")
return ok := true
} vals := make([]int, 0, len(parts))
for _, p := range parts {
s := getSettings() p = strings.TrimSpace(p)
recordAbs, err := resolvePathRelativeToApp(s.RecordDir) n, err := strconv.Atoi(p)
if err != nil { if err != nil || n < 0 {
http.Error(w, "recordDir auflösung fehlgeschlagen: "+err.Error(), http.StatusInternalServerError) ok = false
return break
} }
doneAbs, err := resolvePathRelativeToApp(s.DoneDir) vals = append(vals, n)
if err != nil {
http.Error(w, "doneDir auflösung fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return
}
// Kandidaten: erst done (inkl. 1 Level Subdir, aber ohne "keep"),
// dann keep (inkl. 1 Level Subdir), dann recordDir
names := []string{file}
// Falls UI noch ".ts" kennt, die Datei aber schon als ".mp4" existiert:
if ext == ".ts" {
mp4File := strings.TrimSuffix(file, ext) + ".mp4"
names = append(names, mp4File)
}
var outPath string
for _, name := range names {
// done root + done/<subdir>/ (skip "keep")
if p, _, ok := findFileInDirOrOneLevelSubdirs(doneAbs, name, "keep"); ok {
outPath = p
break
} }
// keep root + keep/<subdir>/ if ok {
if p, _, ok := findFileInDirOrOneLevelSubdirs(filepath.Join(doneAbs, "keep"), name, ""); ok { if len(vals) == 2 {
outPath = p startSec = vals[0]*60 + vals[1]
break return
} } else if len(vals) == 3 {
// record root (+ optional 1 Level Subdir) startSec = vals[0]*3600 + vals[1]*60 + vals[2]
if p, _, ok := findFileInDirOrOneLevelSubdirs(recordAbs, name, ""); ok {
outPath = p
break
}
}
if outPath == "" {
http.Error(w, "datei nicht gefunden", http.StatusNotFound)
return
}
outPath = filepath.Clean(strings.TrimSpace(outPath))
// 1) ✅ TS -> MP4 (on-demand remux)
if strings.ToLower(filepath.Ext(outPath)) == ".ts" {
newOut, err := maybeRemuxTS(outPath)
if err != nil {
http.Error(w, "TS kann im Browser nicht abgespielt werden; Remux fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return
}
if strings.TrimSpace(newOut) == "" {
http.Error(w, "TS kann im Browser nicht abgespielt werden; Remux hat keine MP4 erzeugt", http.StatusInternalServerError)
return
}
outPath = filepath.Clean(strings.TrimSpace(newOut))
// sicherstellen, dass wirklich eine MP4 existiert
fi, err := os.Stat(outPath)
if err != nil || fi.IsDir() || fi.Size() == 0 || strings.ToLower(filepath.Ext(outPath)) != ".mp4" {
http.Error(w, "Remux-Ergebnis ungültig", http.StatusInternalServerError)
return
}
}
// ✅ Falls Datei ".mp4" heißt, aber eigentlich TS/HTML ist -> nicht als MP4 ausliefern
if strings.ToLower(filepath.Ext(outPath)) == ".mp4" {
kind, _ := sniffVideoKind(outPath)
switch kind {
case "ts":
newOut, err := maybeRemuxTS(outPath)
if err != nil {
http.Error(w, "Datei ist TS (nur .mp4 benannt); Remux fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return return
} }
outPath = filepath.Clean(strings.TrimSpace(newOut))
case "html":
http.Error(w, "Server liefert HTML statt Video (Pfad/Lookup prüfen)", http.StatusInternalServerError)
return
}
}
// 2) ✅ MP4 -> Quality Transcode (on-demand)
w.Header().Set("Cache-Control", "no-store")
stream := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("stream")))
wantStream := stream == "1" || stream == "true" || stream == "yes"
if q != "" && wantStream {
prof, _ := profileFromQuality(q)
// ⚠️ Streaming-Transcode: startet Playback bevor fertig
if err := serveTranscodedStream(r.Context(), w, outPath, prof); err != nil {
http.Error(w, "transcode stream failed: "+err.Error(), http.StatusInternalServerError)
return
} }
return return
} }
if q != "" { // number: seconds OR fraction
var terr error f, err := strconv.ParseFloat(s, 64)
outPath, terr = maybeTranscodeForRequest(r.Context(), outPath, q)
if terr != nil {
http.Error(w, "transcode failed: "+terr.Error(), http.StatusInternalServerError)
return
}
}
serveVideoFile(w, r, outPath)
return
}
// ✅ ALT: Wiedergabe über Job-ID (funktioniert nur solange Job im RAM existiert)
id := strings.TrimSpace(r.URL.Query().Get("id"))
if id == "" {
http.Error(w, "id fehlt", http.StatusBadRequest)
return
}
jobsMu.Lock()
job, ok := jobs[id]
jobsMu.Unlock()
if !ok {
http.Error(w, "job nicht gefunden", http.StatusNotFound)
return
}
outPath := filepath.Clean(strings.TrimSpace(job.Output))
if outPath == "" {
http.Error(w, "output fehlt", http.StatusNotFound)
return
}
if !filepath.IsAbs(outPath) {
abs, err := resolvePathRelativeToApp(outPath)
if err != nil { if err != nil {
http.Error(w, "pfad auflösung fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return return
} }
outPath = abs if f <= 0 {
return
}
// < 1.0 => treat as fraction (currentProgress)
if f > 0 && f < 1.0 {
startFrac = f
return
}
// >= 1.0 => treat as seconds (floor)
startSec = int(f)
} }
fi, err := os.Stat(outPath) parseFracOrSeconds(raw)
if err != nil || fi.IsDir() || fi.Size() == 0 {
http.Error(w, "datei nicht gefunden", http.StatusNotFound) // optional explicit progress overrides fraction
if rawProgress != "" {
f, err := strconv.ParseFloat(strings.TrimSpace(rawProgress), 64)
if err == nil && f > 0 && f < 1.0 {
startFrac = f
}
}
if startSec < 0 {
startSec = 0
}
// ---- resolve outPath from file or id ----
resolveOutPath := func() (string, bool) {
// ✅ Wiedergabe über Dateiname (für doneDir / recordDir)
if rawFile := strings.TrimSpace(r.URL.Query().Get("file")); rawFile != "" {
file, err := url.QueryUnescape(rawFile)
if err != nil {
writeErr(http.StatusBadRequest, "ungültiger file")
return "", false
}
file = strings.TrimSpace(file)
// kein Pfad, keine Backslashes, kein Traversal
if file == "" ||
strings.Contains(file, "/") ||
strings.Contains(file, "\\") ||
filepath.Base(file) != file {
writeErr(http.StatusBadRequest, "ungültiger file")
return "", false
}
ext := strings.ToLower(filepath.Ext(file))
if ext != ".mp4" && ext != ".ts" {
writeErr(http.StatusForbidden, "nicht erlaubt")
return "", false
}
s := getSettings()
recordAbs, err := resolvePathRelativeToApp(s.RecordDir)
if err != nil {
writeErr(http.StatusInternalServerError, "recordDir auflösung fehlgeschlagen: "+err.Error())
return "", false
}
doneAbs, err := resolvePathRelativeToApp(s.DoneDir)
if err != nil {
writeErr(http.StatusInternalServerError, "doneDir auflösung fehlgeschlagen: "+err.Error())
return "", false
}
// Kandidaten: erst done (inkl. 1 Level Subdir, aber ohne "keep"),
// dann keep (inkl. 1 Level Subdir), dann recordDir
names := []string{file}
if ext == ".ts" {
names = append(names, strings.TrimSuffix(file, ext)+".mp4")
}
var outPath string
for _, name := range names {
if p, _, ok := findFileInDirOrOneLevelSubdirs(doneAbs, name, "keep"); ok {
outPath = p
break
}
if p, _, ok := findFileInDirOrOneLevelSubdirs(filepath.Join(doneAbs, "keep"), name, ""); ok {
outPath = p
break
}
if p, _, ok := findFileInDirOrOneLevelSubdirs(recordAbs, name, ""); ok {
outPath = p
break
}
}
if outPath == "" {
writeErr(http.StatusNotFound, "datei nicht gefunden")
return "", false
}
return filepath.Clean(strings.TrimSpace(outPath)), true
}
// ✅ ALT: Wiedergabe über Job-ID (funktioniert nur solange Job im RAM existiert)
id := strings.TrimSpace(r.URL.Query().Get("id"))
if id == "" {
writeErr(http.StatusBadRequest, "id fehlt")
return "", false
}
jobsMu.Lock()
job, ok := jobs[id]
jobsMu.Unlock()
if !ok {
writeErr(http.StatusNotFound, "job nicht gefunden")
return "", false
}
outPath := filepath.Clean(strings.TrimSpace(job.Output))
if outPath == "" {
writeErr(http.StatusNotFound, "output fehlt")
return "", false
}
if !filepath.IsAbs(outPath) {
abs, err := resolvePathRelativeToApp(outPath)
if err != nil {
writeErr(http.StatusInternalServerError, "pfad auflösung fehlgeschlagen: "+err.Error())
return "", false
}
outPath = abs
}
fi, err := os.Stat(outPath)
if err != nil || fi.IsDir() || fi.Size() == 0 {
writeErr(http.StatusNotFound, "datei nicht gefunden")
return "", false
}
return outPath, true
}
outPath, ok := resolveOutPath()
if !ok {
return return
} }
// 1) ✅ TS -> MP4 (on-demand remux) // ---- convert progress fraction to seconds (if needed) ----
if startSec == 0 && startFrac > 0 && startFrac < 1.0 {
// ffprobe duration (cached)
if err := ensureFFprobeAvailable(); err == nil {
pctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
dur, derr := getVideoDurationSecondsCached(pctx, outPath)
cancel()
if derr == nil && dur > 0 {
startSec = int(startFrac * dur)
}
}
}
// sanitize + optional bucket align (wie bei GOP-ish seeking)
if startSec < 0 {
startSec = 0
}
startSec = (startSec / 2) * 2
// ---- TS -> MP4 (on-demand remux) ----
if strings.ToLower(filepath.Ext(outPath)) == ".ts" { if strings.ToLower(filepath.Ext(outPath)) == ".ts" {
newOut, err := maybeRemuxTS(outPath) newOut, err := maybeRemuxTS(outPath)
if err != nil { if err != nil {
http.Error(w, "TS Remux fehlgeschlagen: "+err.Error(), http.StatusInternalServerError) writeErr(http.StatusInternalServerError, "TS Remux fehlgeschlagen: "+err.Error())
return return
} }
if strings.TrimSpace(newOut) == "" { if strings.TrimSpace(newOut) == "" {
http.Error(w, "TS kann im Browser nicht abgespielt werden; Remux hat keine MP4 erzeugt", http.StatusInternalServerError) writeErr(http.StatusInternalServerError, "TS kann im Browser nicht abgespielt werden; Remux hat keine MP4 erzeugt")
return return
} }
outPath = filepath.Clean(strings.TrimSpace(newOut)) outPath = filepath.Clean(strings.TrimSpace(newOut))
fi, err := os.Stat(outPath) fi, err := os.Stat(outPath)
if err != nil || fi.IsDir() || fi.Size() == 0 || strings.ToLower(filepath.Ext(outPath)) != ".mp4" { if err != nil || fi.IsDir() || fi.Size() == 0 || strings.ToLower(filepath.Ext(outPath)) != ".mp4" {
http.Error(w, "Remux-Ergebnis ungültig", http.StatusInternalServerError) writeErr(http.StatusInternalServerError, "Remux-Ergebnis ungültig")
return return
} }
} }
// 2) ✅ MP4 -> Quality Transcode (on-demand) // ✅ Falls Datei ".mp4" heißt, aber eigentlich TS/HTML ist -> nicht als MP4 ausliefern
if strings.ToLower(filepath.Ext(outPath)) == ".mp4" {
kind, _ := sniffVideoKind(outPath)
switch kind {
case "ts":
newOut, err := maybeRemuxTS(outPath)
if err != nil {
writeErr(http.StatusInternalServerError, "Datei ist TS (nur .mp4 benannt); Remux fehlgeschlagen: "+err.Error())
return
}
outPath = filepath.Clean(strings.TrimSpace(newOut))
case "html":
writeErr(http.StatusInternalServerError, "Server liefert HTML statt Video (Pfad/Lookup prüfen)")
return
}
}
// ---- Quality / Transcode handling ----
w.Header().Set("Cache-Control", "no-store") w.Header().Set("Cache-Control", "no-store")
if q != "" {
stream := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("stream")))
wantStream := stream == "1" || stream == "true" || stream == "yes"
// ✅ Wenn quality gesetzt ist:
if res != "" {
prof, _ := profileFromResolution(res)
// ✅ wenn Quelle schon <= Zielhöhe: ORIGINAL liefern
// ABER NUR wenn wir NICHT seeken und NICHT streamen wollen.
if prof.Height > 0 && startSec == 0 && !wantStream {
if err := ensureFFprobeAvailable(); err == nil {
pctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
defer cancel()
if srcH, err := getVideoHeightCached(pctx, outPath); err == nil && srcH > 0 {
if srcH <= prof.Height+8 {
serveVideoFile(w, r, outPath)
return
}
}
}
}
// ✅ 1) Seek (startSec>0): Standard = Segment-Datei transcodieren & dann normal ausliefern (Range-fähig)
// stream=1 kann weiterhin den "live pipe" erzwingen.
if startSec > 0 && !wantStream {
segPath, terr := maybeTranscodeForRequest(r.Context(), outPath, res, startSec)
if terr != nil {
writeErr(http.StatusInternalServerError, "transcode failed: "+terr.Error())
return
}
// ✅ Offset NUR setzen, wenn wir wirklich ab startSec ausliefern (Segment)
w.Header().Set("X-Transcode-Offset-Seconds", strconv.Itoa(startSec))
serveVideoFile(w, r, segPath)
return
}
// ✅ 2) stream=1 ODER startSec>0 mit stream=true: pipe-stream
if wantStream || startSec > 0 {
if startSec > 0 {
// ✅ Offset NUR setzen, wenn wir wirklich ab startSec ausliefern (Stream)
w.Header().Set("X-Transcode-Offset-Seconds", strconv.Itoa(startSec))
}
if err := serveTranscodedStreamAt(r.Context(), w, outPath, prof, startSec); err != nil {
if errors.Is(err, context.Canceled) {
return
}
writeErr(http.StatusInternalServerError, "transcode stream failed: "+err.Error())
return
}
return
}
// ✅ 3) startSec==0: Full-file Cache-Transcode (wie vorher)
if startSec == 0 {
segPath, terr := maybeTranscodeForRequest(r.Context(), outPath, res, 0)
if terr != nil {
writeErr(http.StatusInternalServerError, "transcode failed: "+terr.Error())
return
}
serveVideoFile(w, r, segPath)
return
}
}
// ✅ Full-file Cache-Transcode nur wenn startSec == 0
if res != "" && startSec == 0 {
var terr error var terr error
outPath, terr = maybeTranscodeForRequest(r.Context(), outPath, q) outPath, terr = maybeTranscodeForRequest(r.Context(), outPath, res, startSec)
if terr != nil { if terr != nil {
http.Error(w, "transcode failed: "+terr.Error(), http.StatusInternalServerError) writeErr(http.StatusInternalServerError, "transcode failed: "+terr.Error())
return return
} }
} }
@ -464,18 +638,50 @@ func (fw flushWriter) Write(p []byte) (int, error) {
return n, err return n, err
} }
// isClientDisconnectErr reports whether err resulted from the HTTP client
// going away mid-response (canceled request, closed connection, reset, ...).
// Such errors are expected during streaming and must not be logged/treated
// as server failures.
func isClientDisconnectErr(err error) bool {
	if err == nil {
		return false
	}
	if errors.Is(err, context.Canceled) || errors.Is(err, net.ErrClosed) || errors.Is(err, io.ErrClosedPipe) {
		return true
	}
	// Typical Windows / net/http cases.
	// errors.As walks the whole wrap chain (including *net.OpError.Err),
	// so wrapped errnos are found too — the previous direct type assertion
	// on op.Err missed errnos that were wrapped one level deeper.
	var se syscall.Errno
	if errors.As(err, &se) {
		switch int(se) {
		case 10054, 10053, 10058: // WSAECONNRESET, WSAECONNABORTED, WSAESHUTDOWN
			return true
		}
	}
	// Last resort: match well-known, platform-dependent message fragments.
	msg := strings.ToLower(err.Error())
	return strings.Contains(msg, "broken pipe") ||
		strings.Contains(msg, "connection reset") ||
		strings.Contains(msg, "forcibly closed") ||
		strings.Contains(msg, "wsasend") ||
		strings.Contains(msg, "wsarecv")
}
func serveTranscodedStream(ctx context.Context, w http.ResponseWriter, inPath string, prof TranscodeProfile) error { func serveTranscodedStream(ctx context.Context, w http.ResponseWriter, inPath string, prof TranscodeProfile) error {
return serveTranscodedStreamAt(ctx, w, inPath, prof, 0)
}
func serveTranscodedStreamAt(ctx context.Context, w http.ResponseWriter, inPath string, prof TranscodeProfile, startSec int) error {
if err := ensureFFmpegAvailable(); err != nil { if err := ensureFFmpegAvailable(); err != nil {
return err return err
} }
// Header vor dem ersten Write setzen // ffmpeg args (mit -ss vor -i)
w.Header().Set("Content-Type", "video/mp4") args := buildFFmpegStreamArgsAt(inPath, prof, startSec)
w.Header().Set("Cache-Control", "no-store")
// Range macht bei Pipe-Streaming i.d.R. keinen Sinn:
w.Header().Set("Accept-Ranges", "none")
args := buildFFmpegStreamArgs(inPath, prof)
cmd := exec.CommandContext(ctx, "ffmpeg", args...) cmd := exec.CommandContext(ctx, "ffmpeg", args...)
stdout, err := cmd.StdoutPipe() stdout, err := cmd.StdoutPipe()
@ -483,34 +689,74 @@ func serveTranscodedStream(ctx context.Context, w http.ResponseWriter, inPath st
return err return err
} }
var stderr bytes.Buffer stderr, err := cmd.StderrPipe()
cmd.Stderr = &stderr if err != nil {
return err
}
if err := cmd.Start(); err != nil { if err := cmd.Start(); err != nil {
return err return err
} }
defer func() { _ = stdout.Close() }()
flusher, _ := w.(http.Flusher) // stderr MUSS gelesen werden, sonst kann ffmpeg blockieren
fw := flushWriter{w: w, f: flusher} go func() {
_, _ = io.ReadAll(stderr)
_ = cmd.Wait()
}()
buf := make([]byte, 64*1024) w.Header().Set("Cache-Control", "no-store")
_, copyErr := io.CopyBuffer(fw, stdout, buf) w.Header().Set("Content-Type", "video/mp4")
w.Header().Set("Accept-Ranges", "none")
w.WriteHeader(http.StatusOK)
waitErr := cmd.Wait() // kontinuierlich flushen
var out io.Writer = w
// Wenn Client abbricht, ist ctx meist canceled -> nicht als "echter" Fehler behandeln if f, ok := w.(http.Flusher); ok {
if ctx.Err() != nil { out = flushWriter{w: w, f: f}
return ctx.Err()
} }
_, copyErr := io.Copy(out, stdout)
// Client abgebrochen -> kein Fehler
if copyErr != nil { if copyErr != nil {
return fmt.Errorf("stream copy failed: %w", copyErr) if isClientDisconnectErr(copyErr) {
return nil
}
} }
if waitErr != nil {
return fmt.Errorf("ffmpeg failed: %w (stderr=%s)", waitErr, strings.TrimSpace(stderr.String())) // Wenn der Request context weg ist: ebenfalls ok (Quality-Wechsel, Seek, Tab zu)
if ctx.Err() != nil && errors.Is(ctx.Err(), context.Canceled) {
return nil
} }
return nil
return copyErr
}
func buildFFmpegStreamArgsAt(inPath string, prof TranscodeProfile, startSec int) []string {
args := buildFFmpegStreamArgs(inPath, prof)
if startSec <= 0 {
return args
}
// Insert "-ss <sec>" before "-i"
out := make([]string, 0, len(args)+2)
inserted := false
for i := 0; i < len(args); i++ {
if !inserted && args[i] == "-i" {
out = append(out, "-ss", strconv.Itoa(startSec))
inserted = true
}
out = append(out, args[i])
}
// Fallback: falls "-i" nicht gefunden wird, häng's vorne dran
if !inserted {
return append([]string{"-ss", strconv.Itoa(startSec)}, args...)
}
return out
} }
func recordStatus(w http.ResponseWriter, r *http.Request) { func recordStatus(w http.ResponseWriter, r *http.Request) {
@ -1146,10 +1392,8 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) {
} }
} }
// 2) Fallback: RAM-Cache only (immer noch schnell, kein ffprobe) // ✅ Kein Cache-only Fallback hier.
if dur <= 0 { // Wenn meta fehlt, bleibt dur erstmal 0 und wird beim Ausliefern (Pagination) via ensureVideoMetaForFileBestEffort erzeugt.
dur = durationSecondsCacheOnly(full, fi)
}
ended := t ended := t
mk := modelFromFullPath(full) mk := modelFromFullPath(full)
@ -1357,8 +1601,43 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) {
// Response jobs bauen // Response jobs bauen
out := make([]*RecordJob, 0, max(0, end-start)) out := make([]*RecordJob, 0, max(0, end-start))
for _, i := range idx[start:end] {
out = append(out, items[i].job) for _, ii := range idx[start:end] {
base := items[ii].job
if base == nil {
continue
}
// ✅ Kopie erzeugen (wichtig: keine Race/Mutations am Cache-Objekt)
c := *base
// ✅ Meta immer aus meta.json (ggf. generieren, wenn fehlt)
// Kurzes Timeout pro Item, damit eine Seite nicht "hängen" kann.
pctx, cancel := context.WithTimeout(r.Context(), 3*time.Second)
m, ok := ensureVideoMetaForFileBestEffort(pctx, c.Output, c.SourceURL)
cancel()
// Wenn Meta ok: Felder IMMER daraus setzen
if ok && m != nil {
c.Meta = m
c.DurationSeconds = m.DurationSeconds
c.SizeBytes = m.FileSize
c.VideoWidth = m.VideoWidth
c.VideoHeight = m.VideoHeight
c.FPS = m.FPS
// SourceURL: wenn Job leer, aus Meta übernehmen
if strings.TrimSpace(c.SourceURL) == "" && strings.TrimSpace(m.SourceURL) != "" {
c.SourceURL = strings.TrimSpace(m.SourceURL)
}
} else {
// Falls wirklich gar keine Meta gebaut werden kann: wenigstens Size korrekt setzen
if fi, err := os.Stat(c.Output); err == nil && fi != nil && !fi.IsDir() && fi.Size() > 0 {
c.SizeBytes = fi.Size()
}
}
out = append(out, &c)
} }
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
@ -1490,6 +1769,16 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) {
} }
} }
// ✅ NEU: auch Transcode-Cache zum endgültig gelöschten Video entfernen
if prevCanonical != "" {
removeTranscodesForID(doneAbs, prevCanonical)
// Best-effort (falls irgendwo doch mal abweichende IDs genutzt wurden)
if prevBase != "" && prevBase != prevCanonical {
removeTranscodesForID(doneAbs, stripHotPrefix(prevBase))
}
}
if err := os.MkdirAll(trashDir, 0o755); err != nil { if err := os.MkdirAll(trashDir, 0o755); err != nil {
http.Error(w, "trash dir erstellen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError) http.Error(w, "trash dir erstellen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return return

View File

@ -27,15 +27,15 @@ func setJobProgress(job *RecordJob, phase string, pct int) {
rangeFor := func(ph string) rng { rangeFor := func(ph string) rng {
switch ph { switch ph {
case "postwork": case "postwork":
return rng{70, 72} return rng{0, 5}
case "remuxing": case "remuxing":
return rng{72, 78} return rng{5, 65}
case "moving": case "moving":
return rng{78, 84} return rng{65, 75}
case "probe": case "probe":
return rng{84, 86} return rng{75, 80}
case "assets": case "assets":
return rng{86, 99} return rng{80, 99}
default: default:
return rng{0, 100} return rng{0, 100}
} }
@ -58,6 +58,14 @@ func setJobProgress(job *RecordJob, phase string, pct int) {
job.Phase = phase job.Phase = phase
} }
// ✅ Sonderfall: "wartet auf Nachbearbeitung" => Progress bleibt 0%
// Erwartung: Caller sendet phase="postwork" und pct=0 solange nur gewartet wird.
// Muss vor "niemals rückwärts" passieren, sonst käme man von Recording-Progress nicht mehr auf 0.
if phaseLower == "postwork" && pct == 0 {
job.Progress = 0
return
}
// Progress-Logik: // Progress-Logik:
// - wenn wir in Postwork sind und jemand phasenlokale 0..100 liefert (z.B. remuxing 25), // - wenn wir in Postwork sind und jemand phasenlokale 0..100 liefert (z.B. remuxing 25),
// mappe das in den globalen Bereich der Phase. // mappe das in den globalen Bereich der Phase.
@ -66,20 +74,25 @@ func setJobProgress(job *RecordJob, phase string, pct int) {
if inPostwork { if inPostwork {
r := rangeFor(phaseLower) r := rangeFor(phaseLower)
if r.start > 0 && r.end >= r.start { if r.end >= r.start {
// Wenn pct kleiner ist als unser globaler Einstiegspunkt, interpretieren wir ihn als lokal (0..100) // Heuristik:
// und mappen in [start..end]. // - Wenn pct bereits im globalen Bereich der Phase liegt => als global interpretieren, clampen.
if pct < r.start { // - Sonst => als lokales 0..100 interpretieren und in [start..end] mappen.
if pct >= r.start && pct <= r.end {
// schon global
mapped = pct
} else {
// lokal 0..100 -> global
width := float64(r.end - r.start) width := float64(r.end - r.start)
mapped = r.start + int(math.Round((float64(pct)/100.0)*width)) mapped = r.start + int(math.Round((float64(pct)/100.0)*width))
} else { }
// Wenn schon "global" geliefert wird, trotzdem in den Bereich begrenzen
if mapped < r.start { // clamp in den Bereich
mapped = r.start if mapped < r.start {
} mapped = r.start
if mapped > r.end { }
mapped = r.end if mapped > r.end {
} mapped = r.end
} }
} }
} }

View File

@ -107,7 +107,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
} }
// ✅ Phase für Recording explizit setzen (damit spätere Progress-Writer das erkennen können) // ✅ Phase für Recording explizit setzen (damit spätere Progress-Writer das erkennen können)
setJobProgress(job, "recording", 1) setJobProgress(job, "recording", 0)
notifyJobsChanged() notifyJobsChanged()
// ---- Aufnahme starten (Output-Pfad sauber relativ zur EXE auflösen) ---- // ---- Aufnahme starten (Output-Pfad sauber relativ zur EXE auflösen) ----
@ -173,6 +173,10 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
err = errors.New("unsupported provider") err = errors.New("unsupported provider")
} }
if err != nil && shouldLogRecordError(err, provider, req) {
fmt.Println("❌ [record]", provider, job.SourceURL, "->", err)
}
// ---- Recording fertig: EndedAt/Error setzen ---- // ---- Recording fertig: EndedAt/Error setzen ----
end := time.Now() end := time.Now()
@ -201,10 +205,12 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
// ✅ WICHTIG: sofort Phase wechseln, damit Recorder-Progress danach nichts mehr “zurücksetzt” // ✅ WICHTIG: sofort Phase wechseln, damit Recorder-Progress danach nichts mehr “zurücksetzt”
job.Phase = "postwork" job.Phase = "postwork"
// ✅ Progress darf ab jetzt nicht mehr runtergehen (mind. Einstieg in Postwork) /*
if job.Progress < 70 { // ✅ Progress darf ab jetzt nicht mehr runtergehen (mind. Einstieg in Postwork)
job.Progress = 70 if job.Progress < 70 {
} job.Progress = 70
}
*/
out := strings.TrimSpace(job.Output) out := strings.TrimSpace(job.Output)
jobsMu.Unlock() jobsMu.Unlock()
@ -256,7 +262,10 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
notifyJobsChanged() notifyJobsChanged()
notifyDoneChanged() notifyDoneChanged()
fmt.Println("🧹 auto-deleted (pre-queue):", base, "| size:", formatBytesSI(fi.Size())) if shouldLogRecordInfo(req) {
fmt.Println("🧹 auto-deleted (pre-queue):", base, "(size: "+formatBytesSI(fi.Size())+")")
}
return return
} else { } else {
fmt.Println("⚠️ auto-delete (pre-queue) failed:", derr) fmt.Println("⚠️ auto-delete (pre-queue) failed:", derr)
@ -305,7 +314,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
jobsMu.Unlock() jobsMu.Unlock()
// optisches "queued" bumping // optisches "queued" bumping
setJobProgress(job, "postwork", 71) setJobProgress(job, "postwork", 0)
notifyJobsChanged() notifyJobsChanged()

View File

@ -1,3 +1,5 @@
// backend\record_stream_cb.go
package main package main
import ( import (
@ -49,11 +51,6 @@ func RecordStream(
return fmt.Errorf("playlist abrufen: %w", err) return fmt.Errorf("playlist abrufen: %w", err)
} }
// ✅ Job erst jetzt sichtbar machen (Stream wirklich verfügbar)
if job != nil {
_ = publishJob(job.ID)
}
if job != nil && strings.TrimSpace(job.PreviewDir) == "" { if job != nil && strings.TrimSpace(job.PreviewDir) == "" {
assetID := assetIDForJob(job) assetID := assetIDForJob(job)
if strings.TrimSpace(assetID) == "" { if strings.TrimSpace(assetID) == "" {
@ -75,9 +72,6 @@ func RecordStream(
if err != nil { if err != nil {
return fmt.Errorf("datei erstellen: %w", err) return fmt.Errorf("datei erstellen: %w", err)
} }
if job != nil {
_ = publishJob(job.ID)
}
defer func() { defer func() {
_ = file.Close() _ = file.Close()
@ -88,6 +82,8 @@ func RecordStream(
var lastPush time.Time var lastPush time.Time
var lastBytes int64 var lastBytes int64
published := false
// 5) Segmente „watchen“ analog zu WatchSegments + HandleSegment im DVR // 5) Segmente „watchen“ analog zu WatchSegments + HandleSegment im DVR
err = playlist.WatchSegments(ctx, hc, httpCookie, func(b []byte, duration float64) error { err = playlist.WatchSegments(ctx, hc, httpCookie, func(b []byte, duration float64) error {
// Hier wäre im DVR ch.HandleSegment bei dir einfach in eine Datei schreiben // Hier wäre im DVR ch.HandleSegment bei dir einfach in eine Datei schreiben
@ -95,6 +91,12 @@ func RecordStream(
return fmt.Errorf("schreibe segment: %w", err) return fmt.Errorf("schreibe segment: %w", err)
} }
// ✅ erst sichtbar machen, wenn wirklich Bytes geschrieben wurden
if job != nil && !published {
published = true
_ = publishJob(job.ID)
}
// ✅ live size (UI) throttled // ✅ live size (UI) throttled
written += int64(len(b)) written += int64(len(b))
if job != nil { if job != nil {

View File

@ -1,3 +1,5 @@
// backend\record_stream_mfc.go
package main package main
import ( import (

View File

@ -14,34 +14,36 @@ import (
"time" "time"
) )
func serveVideoFile(w http.ResponseWriter, r *http.Request, path string) { func serveVideoFile(w http.ResponseWriter, r *http.Request, filePath string) {
f, err := openForReadShareDelete(path) f, err := os.Open(filePath)
if err != nil { if err != nil {
http.Error(w, "datei öffnen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError) http.Error(w, "open failed: "+err.Error(), http.StatusNotFound)
return return
} }
defer f.Close() defer f.Close()
fi, err := f.Stat() fi, err := f.Stat()
if err != nil || fi.IsDir() || fi.Size() == 0 { if err != nil || fi.IsDir() || fi.Size() <= 0 {
http.Error(w, "datei nicht gefunden", http.StatusNotFound) http.Error(w, "file not found", http.StatusNotFound)
return return
} }
w.Header().Set("Cache-Control", "no-store") ext := strings.ToLower(filepath.Ext(filePath))
w.Header().Set("Accept-Ranges", "bytes")
w.Header().Set("X-Content-Type-Options", "nosniff")
ext := strings.ToLower(filepath.Ext(path))
switch ext { switch ext {
case ".mp4":
w.Header().Set("Content-Type", "video/mp4")
case ".ts": case ".ts":
w.Header().Set("Content-Type", "video/mp2t") w.Header().Set("Content-Type", "video/mp2t")
default: default:
w.Header().Set("Content-Type", "video/mp4") w.Header().Set("Content-Type", "application/octet-stream")
} }
// ServeContent unterstützt Range Requests (wichtig für Video) // Range-Support (http.ServeContent macht 206/Content-Range automatisch, wenn Range kommt)
http.ServeContent(w, r, filepath.Base(path), fi.ModTime(), f) w.Header().Set("Accept-Ranges", "bytes")
w.Header().Set("Cache-Control", "no-store")
// ServeContent setzt Content-Length/Last-Modified/ETag-Handling korrekt
http.ServeContent(w, r, filepath.Base(filePath), fi.ModTime(), f)
} }
func sniffVideoKind(path string) (string, error) { func sniffVideoKind(path string) (string, error) {

325
backend/settings.go Normal file
View File

@ -0,0 +1,325 @@
// backend\settings.go
package main
import (
"encoding/json"
"fmt"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
"github.com/sqweek/dialog"
)
// RecorderSettings is the user-facing configuration of the recorder.
// It is persisted as JSON on disk (see settingsFilePath) and exchanged
// verbatim with the frontend by recordSettingsHandler, so the json tags
// form the frontend contract.
type RecorderSettings struct {
	RecordDir string `json:"recordDir"` // directory for in-progress recordings
	DoneDir string `json:"doneDir"` // directory finished recordings are moved to
	FFmpegPath string `json:"ffmpegPath"` // explicit ffmpeg binary; empty = auto-detect
	AutoAddToDownloadList bool `json:"autoAddToDownloadList"`
	AutoStartAddedDownloads bool `json:"autoStartAddedDownloads"`
	UseChaturbateAPI bool `json:"useChaturbateApi"`
	UseMyFreeCamsWatcher bool `json:"useMyFreeCamsWatcher"`
	// When enabled, finished downloads are deleted automatically if they are
	// smaller than the threshold below.
	AutoDeleteSmallDownloads bool `json:"autoDeleteSmallDownloads"`
	AutoDeleteSmallDownloadsBelowMB int `json:"autoDeleteSmallDownloadsBelowMB"` // threshold in MB; clamped to 0..100_000 on load/save
	BlurPreviews bool `json:"blurPreviews"`
	TeaserPlayback string `json:"teaserPlayback"` // still | hover | all
	TeaserAudio bool `json:"teaserAudio"` // ✅ play preview/teaser clips with audio
	EnableNotifications bool `json:"enableNotifications"`
	// EncryptedCookies contains base64(nonce+ciphertext) of a JSON cookie map.
	EncryptedCookies string `json:"encryptedCookies"`
}
var (
	// settingsMu guards every read and write of the settings variable.
	settingsMu sync.Mutex
	// settings holds the live configuration. The literals below are the
	// defaults used until loadSettings has read the settings file.
	settings = RecorderSettings{
		RecordDir: "/records",
		DoneDir: "/records/done",
		FFmpegPath: "",
		AutoAddToDownloadList: false,
		AutoStartAddedDownloads: false,
		UseChaturbateAPI: false,
		UseMyFreeCamsWatcher: false,
		AutoDeleteSmallDownloads: false,
		AutoDeleteSmallDownloadsBelowMB: 50,
		BlurPreviews: false,
		TeaserPlayback: "hover",
		TeaserAudio: false,
		EnableNotifications: true,
		EncryptedCookies: "",
	}
	// settingsFile is the default settings file name; settingsFilePath
	// applies the RECORDER_SETTINGS_FILE override and app-dir resolution.
	settingsFile = "recorder_settings.json"
)
// settingsFilePath returns the on-disk location of the settings JSON file.
// Resolution order: RECORDER_SETTINGS_FILE env override (file name), then
// that name resolved relative to the application directory (falls back to
// the working dir under `go run`), then the raw name as a last resort.
func settingsFilePath() string {
	name := strings.TrimSpace(os.Getenv("RECORDER_SETTINGS_FILE"))
	if name == "" {
		name = settingsFile
	}

	p, err := resolvePathRelativeToApp(name)
	if err == nil && strings.TrimSpace(p) != "" {
		return p
	}

	// Fallback: return the name untouched.
	return name
}
// getSettings returns a snapshot copy of the current recorder settings,
// taken while holding the settings mutex.
func getSettings() RecorderSettings {
	settingsMu.Lock()
	snapshot := settings
	settingsMu.Unlock()
	return snapshot
}
// loadSettings reads the settings file (best effort), normalizes/clamps the
// values, installs them as the package-level settings, makes sure the record
// and done directories exist, and finally (re)detects the ffmpeg/ffprobe
// binaries. Intended to be called once at startup.
func loadSettings() {
	p := settingsFilePath()
	b, err := os.ReadFile(p)
	fmt.Println("🔧 settingsFile:", p)
	if err == nil {
		s := getSettings() // ✅ start from defaults so missing JSON keys keep them
		if json.Unmarshal(b, &s) == nil {
			// Normalize paths: trim first, then clean (empty stays empty).
			if strings.TrimSpace(s.RecordDir) != "" {
				s.RecordDir = filepath.Clean(strings.TrimSpace(s.RecordDir))
			}
			if strings.TrimSpace(s.DoneDir) != "" {
				s.DoneDir = filepath.Clean(strings.TrimSpace(s.DoneDir))
			}
			if strings.TrimSpace(s.FFmpegPath) != "" {
				s.FFmpegPath = strings.TrimSpace(s.FFmpegPath)
			}
			// TeaserPlayback: normalize and fall back to "hover" on any
			// unknown value (valid: still | hover | all).
			s.TeaserPlayback = strings.ToLower(strings.TrimSpace(s.TeaserPlayback))
			if s.TeaserPlayback == "" {
				s.TeaserPlayback = "hover"
			}
			if s.TeaserPlayback != "still" && s.TeaserPlayback != "hover" && s.TeaserPlayback != "all" {
				s.TeaserPlayback = "hover"
			}
			// Auto-delete threshold: clamp to 0..100_000 MB.
			if s.AutoDeleteSmallDownloadsBelowMB < 0 {
				s.AutoDeleteSmallDownloadsBelowMB = 0
			}
			if s.AutoDeleteSmallDownloadsBelowMB > 100_000 {
				s.AutoDeleteSmallDownloadsBelowMB = 100_000
			}
			settingsMu.Lock()
			settings = s
			settingsMu.Unlock()
		}
	}
	// Ensure the configured directories exist (best effort).
	s := getSettings()
	recordAbs, _ := resolvePathRelativeToApp(s.RecordDir)
	doneAbs, _ := resolvePathRelativeToApp(s.DoneDir)
	if strings.TrimSpace(recordAbs) != "" {
		_ = os.MkdirAll(recordAbs, 0o755)
	}
	if strings.TrimSpace(doneAbs) != "" {
		_ = os.MkdirAll(doneAbs, 0o755)
	}
	// Determine the ffmpeg/ffprobe path from settings/env/PATH.
	ffmpegPath = detectFFmpegPath()
	fmt.Println("🔍 ffmpegPath:", ffmpegPath)
	ffprobePath = detectFFprobePath()
	fmt.Println("🔍 ffprobePath:", ffprobePath)
}
// saveSettingsToDisk serializes the current settings as indented JSON and
// writes them atomically to the settings file. Failures are logged and
// otherwise ignored (best effort).
func saveSettingsToDisk() {
	data, err := json.MarshalIndent(getSettings(), "", " ")
	if err != nil {
		fmt.Println("⚠️ settings marshal:", err)
		return
	}
	data = append(data, '\n')

	if werr := atomicWriteFile(settingsFilePath(), data); werr != nil {
		fmt.Println("⚠️ settings write:", werr)
		return
	}
	// optional
	// fmt.Println("✅ settings saved:", settingsFilePath())
}
// recordSettingsHandler serves the settings API.
// GET returns the current settings as JSON. POST validates and normalizes
// the submitted settings, ensures the record/done directories exist, stores
// the settings in memory and on disk, re-detects ffmpeg/ffprobe, and echoes
// the resulting settings back. Any other method yields 405.
func recordSettingsHandler(w http.ResponseWriter, r *http.Request) {
	switch r.Method {
	case http.MethodGet:
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("Cache-Control", "no-store")
		_ = json.NewEncoder(w).Encode(getSettings())
		return
	case http.MethodPost:
		var in RecorderSettings
		if err := json.NewDecoder(r.Body).Decode(&in); err != nil {
			http.Error(w, "invalid json: "+err.Error(), http.StatusBadRequest)
			return
		}
		// --- normalize (IMPORTANT: trim first, then empty-check, then clean) ---
		recRaw := strings.TrimSpace(in.RecordDir)
		doneRaw := strings.TrimSpace(in.DoneDir)
		if recRaw == "" || doneRaw == "" {
			http.Error(w, "recordDir und doneDir dürfen nicht leer sein", http.StatusBadRequest)
			return
		}
		in.RecordDir = filepath.Clean(recRaw)
		in.DoneDir = filepath.Clean(doneRaw)
		// Optional but strongly recommended: forbid "." as a target dir.
		if in.RecordDir == "." || in.DoneDir == "." {
			http.Error(w, "recordDir/doneDir dürfen nicht '.' sein", http.StatusBadRequest)
			return
		}
		in.FFmpegPath = strings.TrimSpace(in.FFmpegPath)
		// TeaserPlayback: normalize; unknown values fall back to "hover".
		in.TeaserPlayback = strings.ToLower(strings.TrimSpace(in.TeaserPlayback))
		if in.TeaserPlayback == "" {
			in.TeaserPlayback = "hover"
		}
		if in.TeaserPlayback != "still" && in.TeaserPlayback != "hover" && in.TeaserPlayback != "all" {
			in.TeaserPlayback = "hover"
		}
		// Auto-delete threshold: clamp to 0..100_000 MB.
		if in.AutoDeleteSmallDownloadsBelowMB < 0 {
			in.AutoDeleteSmallDownloadsBelowMB = 0
		}
		if in.AutoDeleteSmallDownloadsBelowMB > 100_000 {
			in.AutoDeleteSmallDownloadsBelowMB = 100_000
		}
		// --- ensure folders (report errors, e.g. missing permissions) ---
		recAbs, err := resolvePathRelativeToApp(in.RecordDir)
		if err != nil {
			http.Error(w, "ungültiger recordDir: "+err.Error(), http.StatusBadRequest)
			return
		}
		doneAbs, err := resolvePathRelativeToApp(in.DoneDir)
		if err != nil {
			http.Error(w, "ungültiger doneDir: "+err.Error(), http.StatusBadRequest)
			return
		}
		if err := os.MkdirAll(recAbs, 0o755); err != nil {
			http.Error(w, "konnte recordDir nicht erstellen: "+err.Error(), http.StatusBadRequest)
			return
		}
		if err := os.MkdirAll(doneAbs, 0o755); err != nil {
			http.Error(w, "konnte doneDir nicht erstellen: "+err.Error(), http.StatusBadRequest)
			return
		}
		// ✅ update the in-memory settings
		settingsMu.Lock()
		settings = in
		settingsMu.Unlock()
		// ✅ persist settings to disk
		saveSettingsToDisk()
		// ✅ re-detect ffmpeg/ffprobe after the change.
		// If the user set FFmpegPath explicitly, use it directly.
		if strings.TrimSpace(in.FFmpegPath) != "" {
			ffmpegPath = in.FFmpegPath
		} else {
			ffmpegPath = detectFFmpegPath()
		}
		fmt.Println("🔍 ffmpegPath:", ffmpegPath)
		ffprobePath = detectFFprobePath()
		fmt.Println("🔍 ffprobePath:", ffprobePath)
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("Cache-Control", "no-store")
		_ = json.NewEncoder(w).Encode(getSettings())
		return
	default:
		http.Error(w, "Nur GET/POST erlaubt", http.StatusMethodNotAllowed)
		return
	}
}
// settingsBrowse opens a native picker dialog for the ?target= query param:
// "ffmpeg" opens a file picker, "record"/"done" open a directory picker.
// A cancelled dialog answers 204 No Content; a successful pick is returned
// as JSON {"path": ...}, made relative to the exe dir when possible.
func settingsBrowse(w http.ResponseWriter, r *http.Request) {
	target := r.URL.Query().Get("target")
	switch target {
	case "record", "done", "ffmpeg":
		// valid
	default:
		http.Error(w, "target muss record, done oder ffmpeg sein", http.StatusBadRequest)
		return
	}

	var (
		picked  string
		pickErr error
	)
	if target == "ffmpeg" {
		// File selection for ffmpeg.exe
		picked, pickErr = dialog.File().
			Title("ffmpeg.exe auswählen").
			Load()
	} else {
		// Directory selection for record/done
		picked, pickErr = dialog.Directory().
			Title("Ordner auswählen").
			Browse()
	}

	if pickErr != nil {
		// User cancelled → 204 No Content is convenient for the frontend
		if strings.Contains(strings.ToLower(pickErr.Error()), "cancel") {
			w.WriteHeader(http.StatusNoContent)
			return
		}
		http.Error(w, "auswahl fehlgeschlagen: "+pickErr.Error(), http.StatusInternalServerError)
		return
	}

	// optional: if inside the exe dir, answer with a RELATIVE path
	picked = maybeMakeRelativeToExe(picked)

	w.Header().Set("Content-Type", "application/json")
	_ = json.NewEncoder(w).Encode(map[string]string{"path": picked})
}
// maybeMakeRelativeToExe converts abs into a slash-separated path relative to
// the executable's directory when abs lies strictly inside it; in every other
// case (lookup error, ".", "..", or a path outside the exe dir) abs is
// returned unchanged.
func maybeMakeRelativeToExe(abs string) string {
	exe, err := os.Executable()
	if err != nil {
		return abs
	}

	rel, err := filepath.Rel(filepath.Dir(exe), abs)
	if err != nil {
		return abs
	}

	// rel of "." / ".." / "../..." means abs is not inside the exe dir,
	// so keep the absolute path.
	switch {
	case rel == "." || rel == "..":
		return abs
	case strings.HasPrefix(rel, ".."+string(os.PathSeparator)):
		return abs
	}

	return filepath.ToSlash(rel) // frontend friendly
}

View File

@ -10,6 +10,7 @@ import (
"os" "os"
"os/exec" "os/exec"
"path/filepath" "path/filepath"
"regexp"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
@ -37,6 +38,66 @@ type heightCacheEntry struct {
var heightCacheMu sync.Mutex var heightCacheMu sync.Mutex
var heightCache = map[string]heightCacheEntry{} var heightCache = map[string]heightCacheEntry{}
// durationCacheEntry caches a probed video duration together with the file's
// mtime and size at probe time, so the entry can be invalidated when the
// file changes on disk.
type durationCacheEntry struct {
	mtime time.Time // file ModTime when the duration was probed
	size int64 // file size when the duration was probed
	dur float64 // probed duration in seconds (> 0)
}

// durationCacheMu guards durationCache.
var durationCacheMu sync.Mutex
// durationCache maps input path -> cached ffprobe duration.
var durationCache = map[string]durationCacheEntry{}
// probeVideoDurationSeconds asks ffprobe for the container duration of
// inPath in seconds. It returns an error when ffprobe fails, prints nothing,
// or reports a non-positive/unparseable duration.
func probeVideoDurationSeconds(ctx context.Context, inPath string) (float64, error) {
	// ffprobe -v error -show_entries format=duration -of csv=p=0 <file>
	out, err := exec.CommandContext(ctx, "ffprobe",
		"-v", "error",
		"-show_entries", "format=duration",
		"-of", "csv=p=0",
		inPath,
	).Output()
	if err != nil {
		return 0, err
	}

	raw := strings.TrimSpace(string(out))
	if raw == "" {
		return 0, fmt.Errorf("ffprobe returned empty duration")
	}

	dur, perr := strconv.ParseFloat(raw, 64)
	if perr != nil || dur <= 0 {
		return 0, fmt.Errorf("bad duration %q", raw)
	}
	return dur, nil
}
// getVideoDurationSecondsCached returns the duration of inPath in seconds,
// served from the in-memory duration cache keyed by path and invalidated
// whenever the file's size or mtime changes. On a cache miss it probes the
// file with ffprobe and stores the result.
func getVideoDurationSecondsCached(ctx context.Context, inPath string) (float64, error) {
	fi, err := os.Stat(inPath)
	if err != nil || fi.IsDir() || fi.Size() <= 0 {
		return 0, fmt.Errorf("input not usable")
	}

	// Fast path: cached entry that still matches the file on disk.
	durationCacheMu.Lock()
	entry, hit := durationCache[inPath]
	durationCacheMu.Unlock()
	if hit && entry.size == fi.Size() && entry.mtime.Equal(fi.ModTime()) && entry.dur > 0 {
		return entry.dur, nil
	}

	dur, err := probeVideoDurationSeconds(ctx, inPath)
	if err != nil {
		return 0, err
	}

	durationCacheMu.Lock()
	durationCache[inPath] = durationCacheEntry{mtime: fi.ModTime(), size: fi.Size(), dur: dur}
	durationCacheMu.Unlock()
	return dur, nil
}
func probeVideoHeight(ctx context.Context, inPath string) (int, error) { func probeVideoHeight(ctx context.Context, inPath string) (int, error) {
// ffprobe -v error -select_streams v:0 -show_entries stream=height -of csv=p=0 <file> // ffprobe -v error -select_streams v:0 -show_entries stream=height -of csv=p=0 <file>
cmd := exec.CommandContext(ctx, "ffprobe", cmd := exec.CommandContext(ctx, "ffprobe",
@ -93,27 +154,40 @@ type TranscodeProfile struct {
Height int Height int
} }
func profileFromQuality(q string) (TranscodeProfile, bool) { func profileFromResolution(res string) (TranscodeProfile, bool) {
switch strings.ToLower(strings.TrimSpace(q)) { // Stash-like: LOW | MEDIUM | HIGH | ORIGINAL (case-insensitive)
case "", "auto": s := strings.ToUpper(strings.TrimSpace(res))
return TranscodeProfile{Name: "auto", Height: 0}, true switch s {
case "2160p": case "", "ORIGINAL", "SOURCE", "AUTO":
return TranscodeProfile{Name: "2160p", Height: 2160}, true return TranscodeProfile{Name: "ORIGINAL", Height: 0}, true
case "1080p": case "LOW":
return TranscodeProfile{Name: "1080p", Height: 1080}, true return TranscodeProfile{Name: "LOW", Height: 480}, true
case "720p": case "MEDIUM":
return TranscodeProfile{Name: "720p", Height: 720}, true return TranscodeProfile{Name: "MEDIUM", Height: 720}, true
case "480p": case "HIGH":
return TranscodeProfile{Name: "480p", Height: 480}, true return TranscodeProfile{Name: "HIGH", Height: 1080}, true
default:
return TranscodeProfile{}, false
} }
// Backwards-Kompatibilität: "<height>p" (z.B. 720p)
s2 := strings.ToLower(strings.TrimSpace(res))
if m := regexp.MustCompile(`^(\d{3,4})p$`).FindStringSubmatch(s2); m != nil {
h, err := strconv.Atoi(m[1])
if err != nil || h <= 0 {
return TranscodeProfile{}, false
}
if h < 144 || h > 4320 {
return TranscodeProfile{}, false
}
return TranscodeProfile{Name: fmt.Sprintf("%dp", h), Height: h}, true
}
return TranscodeProfile{}, false
} }
// Cache layout: <doneAbs>/.transcodes/<canonicalID>/<quality>.mp4 // Cache layout: <doneAbs>/.transcodes/<canonicalID>/<v>/<quality>/s<start>.mp4
func transcodeCachePath(doneAbs, canonicalID, quality string) string { func transcodeCachePath(doneAbs, canonicalID, quality string, startSec int) string {
const v = "v1" const v = "v2"
return filepath.Join(doneAbs, ".transcodes", canonicalID, v, quality+".mp4") return filepath.Join(doneAbs, ".transcodes", canonicalID, v, quality, fmt.Sprintf("s%d.mp4", startSec))
} }
func ensureFFmpegAvailable() error { func ensureFFmpegAvailable() error {
@ -204,15 +278,21 @@ func runFFmpeg(ctx context.Context, args []string) error {
// Public entry used by recordVideo // Public entry used by recordVideo
// ------------------------- // -------------------------
// maybeTranscodeForRequest inspects "quality" query param. // maybeTranscodeForRequest inspects "resolution" query param.
// If quality is "auto" (or empty), it returns original outPath unchanged. // If quality is "auto" (or empty), it returns original outPath unchanged.
// Otherwise it ensures cached transcode exists & is fresh, and returns the cached path. // Otherwise it ensures cached transcode exists & is fresh, and returns the cached path.
func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality string) (string, error) { func maybeTranscodeForRequest(rctx context.Context, originalPath string, resolution string, startSec int) (string, error) {
prof, ok := profileFromQuality(quality) if startSec < 0 {
if !ok { startSec = 0
return "", fmt.Errorf("bad quality %q", quality)
} }
if prof.Name == "auto" { // optional: auf 2 Sekunden runter runden, passt zu GOP=60 (~2s bei 30fps)
startSec = (startSec / 2) * 2
prof, ok := profileFromResolution(resolution)
if !ok {
return "", fmt.Errorf("bad resolution %q", resolution)
}
if strings.EqualFold(prof.Name, "ORIGINAL") || prof.Height <= 0 {
return originalPath, nil return originalPath, nil
} }
@ -221,18 +301,22 @@ func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality
return "", err return "", err
} }
// optional: skip transcode if source is already <= requested height (prevents upscaling) needScale := true
if prof.Height > 0 { if prof.Height > 0 {
// ffprobe is needed only for this optimization
if err := ensureFFprobeAvailable(); err == nil { if err := ensureFFprobeAvailable(); err == nil {
// short timeout for probing
pctx, cancel := context.WithTimeout(rctx, 5*time.Second) pctx, cancel := context.WithTimeout(rctx, 5*time.Second)
defer cancel() defer cancel()
if srcH, err := getVideoHeightCached(pctx, originalPath); err == nil && srcH > 0 { if srcH, err := getVideoHeightCached(pctx, originalPath); err == nil && srcH > 0 {
// if source is already at/below requested (with tiny tolerance), don't transcode // Quelle <= Ziel => kein Downscale nötig
if srcH <= prof.Height+8 { if srcH <= prof.Height+8 {
return originalPath, nil needScale = false
// ✅ WICHTIG: wenn startSec==0, liefern wir wirklich Original (keine Cache-Datei bauen)
if startSec == 0 {
return originalPath, nil
}
} }
} }
} }
@ -254,7 +338,8 @@ func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality
return "", fmt.Errorf("canonical id empty") return "", fmt.Errorf("canonical id empty")
} }
cacheOut := transcodeCachePath(doneAbs, canonicalID, prof.Name) qualityKey := strings.ToLower(strings.TrimSpace(prof.Name))
cacheOut := transcodeCachePath(doneAbs, canonicalID, qualityKey, startSec)
// fast path: already exists & fresh // fast path: already exists & fresh
if isCacheFresh(originalPath, cacheOut) { if isCacheFresh(originalPath, cacheOut) {
@ -293,7 +378,13 @@ func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality
_ = os.Remove(tmp) _ = os.Remove(tmp)
// ffmpeg args // ffmpeg args
args := buildFFmpegArgs(originalPath, tmp, prof) var args []string
if needScale {
args = buildFFmpegArgs(originalPath, tmp, prof, startSec)
} else {
// ✅ nativer Seek: schneiden ohne re-encode
args = buildFFmpegCopySegmentArgs(originalPath, tmp, startSec)
}
if err := runFFmpeg(ctx, args); err != nil { if err := runFFmpeg(ctx, args); err != nil {
_ = os.Remove(tmp) _ = os.Remove(tmp)
@ -335,18 +426,27 @@ func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality
// ffmpeg profiles // ffmpeg profiles
// ------------------------- // -------------------------
func buildFFmpegArgs(inPath, outPath string, prof TranscodeProfile) []string { func buildFFmpegArgs(inPath, outPath string, prof TranscodeProfile, startSec int) []string {
// You can tune these defaults: // You can tune these defaults:
// - CRF: lower => better quality, bigger file (1080p ~22, 720p ~23, 480p ~24/25) // - CRF: lower => better quality, bigger file (1080p ~22, 720p ~23, 480p ~24/25)
// - preset: veryfast is good for on-demand // - preset: veryfast is good for on-demand
crf := "23" crf := "23"
switch prof.Name { h := prof.Height
case "1080p": switch {
case h >= 2160:
crf = "20"
case h >= 1440:
crf = "21"
case h >= 1080:
crf = "22" crf = "22"
case "720p": case h >= 720:
crf = "23" crf = "23"
case "480p": case h >= 480:
crf = "25" crf = "25"
case h >= 360:
crf = "27"
default:
crf = "29"
} }
// Keyframes: choose a stable value; if you want dynamic based on fps you can extend later. // Keyframes: choose a stable value; if you want dynamic based on fps you can extend later.
@ -359,12 +459,27 @@ func buildFFmpegArgs(inPath, outPath string, prof TranscodeProfile) []string {
// scale keeps aspect ratio, ensures even width // scale keeps aspect ratio, ensures even width
vf := fmt.Sprintf("scale=-2:%d", prof.Height) vf := fmt.Sprintf("scale=-2:%d", prof.Height)
return []string{ // sanitize start
if startSec < 0 {
startSec = 0
}
// optional: align to small buckets to reduce cache fragmentation (and match GOP-ish seeking)
// startSec = (startSec / 2) * 2
args := []string{
"-hide_banner", "-hide_banner",
"-loglevel", "error", "-loglevel", "error",
"-nostdin", "-nostdin",
"-y", "-y",
}
// ✅ Startposition: VOR "-i" => schnelles Seek zum nächsten Keyframe (gut für on-demand)
// (Wenn du frame-genau willst: "-ss" NACH "-i", ist aber deutlich langsamer.)
if startSec > 0 {
args = append(args, "-ss", strconv.Itoa(startSec))
}
args = append(args,
"-i", inPath, "-i", inPath,
// ✅ robust: falls Audio fehlt, trotzdem kein Fehler // ✅ robust: falls Audio fehlt, trotzdem kein Fehler
@ -394,58 +509,105 @@ func buildFFmpegArgs(inPath, outPath string, prof TranscodeProfile) []string {
"-movflags", movflags, "-movflags", movflags,
outPath, outPath,
} )
return args
} }
func buildFFmpegStreamArgs(inPath string, prof TranscodeProfile) []string { func buildFFmpegCopySegmentArgs(inPath, outPath string, startSec int) []string {
crf := "23" args := []string{
switch prof.Name {
case "1080p":
crf = "22"
case "720p":
crf = "23"
case "480p":
crf = "25"
}
gop := "60"
vf := fmt.Sprintf("scale=-2:%d", prof.Height)
movflags := "frag_keyframe+empty_moov+default_base_moof"
return []string{
"-hide_banner", "-hide_banner",
"-loglevel", "error", "-loglevel", "error",
"-nostdin", "-nostdin",
"-y", "-y",
"-i", inPath, }
// ✅ robust (wie im File-Transcode) if startSec > 0 {
args = append(args, "-ss", strconv.Itoa(startSec))
}
args = append(args,
"-i", inPath,
"-map", "0:v:0?", "-map", "0:v:0?",
"-map", "0:a:0?", "-map", "0:a:0?",
"-sn", "-sn",
"-vf", vf, // ✅ kein re-encode
"-c", "copy",
// ✅ fürs normale File: moov nach vorne
"-movflags", "+faststart",
outPath,
)
return args
}
func buildFFmpegStreamArgs(inPath string, prof TranscodeProfile) []string {
// Stash streamt MP4 als fragmented MP4 mit empty_moov
// (kein default_base_moof für "plain mp4 stream").
movflags := "frag_keyframe+empty_moov"
// Stash-ähnliche CRF-Werte
crf := "25"
switch strings.ToUpper(strings.TrimSpace(prof.Name)) {
case "HIGH", "1080P":
crf = "23"
case "MEDIUM", "720P":
crf = "25"
case "LOW", "480P":
crf = "27"
}
args := []string{
"-hide_banner",
"-loglevel", "error",
"-nostdin",
// "-y" ist bei pipe egal, kann aber bleiben ich lasse es weg wie im Beispiel
}
// Input
args = append(args, "-i", inPath)
// robust: Video/Audio optional
args = append(args,
"-map", "0:v:0?",
"-map", "0:a:0?",
"-sn",
)
// Scale nur wenn wir wirklich runterskalieren wollen
if prof.Height > 0 {
vf := fmt.Sprintf("scale=-2:%d", prof.Height)
args = append(args, "-vf", vf)
}
// Video
args = append(args,
"-c:v", "libx264", "-c:v", "libx264",
"-preset", "veryfast", "-preset", "veryfast",
"-crf", crf, "-crf", crf,
"-pix_fmt", "yuv420p", "-pix_fmt", "yuv420p",
"-max_muxing_queue_size", "1024",
"-g", gop,
"-keyint_min", gop,
"-sc_threshold", "0", "-sc_threshold", "0",
"-max_muxing_queue_size", "1024",
)
// Audio (nur wenn vorhanden wegen map 0:a:0?)
args = append(args,
"-c:a", "aac", "-c:a", "aac",
"-b:a", "128k", "-b:a", "128k",
"-ac", "2", "-ac", "2",
)
// MP4 stream flags
args = append(args,
"-movflags", movflags, "-movflags", movflags,
"-f", "mp4", "-f", "mp4",
"pipe:1", "pipe:", // wichtig: wie im Beispiel
} )
return args
} }
// ------------------------- // -------------------------

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -5,8 +5,8 @@
<link rel="icon" type="image/svg+xml" href="/vite.svg" /> <link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" /> <meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" />
<title>App</title> <title>App</title>
<script type="module" crossorigin src="/assets/index-DV6ZfOPf.js"></script> <script type="module" crossorigin src="/assets/index-DlgYo3oN.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-BRCxVTHL.css"> <link rel="stylesheet" crossorigin href="/assets/index-SqYhLYXQ.css">
</head> </head>
<body> <body>
<div id="root"></div> <div id="root"></div>

View File

@ -156,6 +156,75 @@ function getProviderFromNormalizedUrl(normUrl: string): Provider | null {
} }
} }
/** Extracts the room/user name from a Chaturbate URL; returns '' for anything else. */
function chaturbateUserFromUrl(normUrl: string): string {
  let parsed: URL
  try {
    parsed = new URL(normUrl)
  } catch {
    return ''
  }
  const host = parsed.hostname.replace(/^www\./i, '').toLowerCase()
  const isChaturbate = host === 'chaturbate.com' || host.endsWith('.chaturbate.com')
  if (!isChaturbate) return ''
  // URLs look like https://chaturbate.com/<name>/... — the first path segment is the name.
  const [firstSegment] = parsed.pathname.split('/').filter(Boolean)
  if (!firstSegment) return ''
  try {
    return decodeURIComponent(firstSegment).trim()
  } catch {
    // malformed percent-encoding behaves like "no name found"
    return ''
  }
}
/**
 * Reduces "arbitrary" provider URLs to ONE unambiguous canonical form.
 * -> important for dedupe (queue, alreadyRunning), clipboard, pending maps.
 */
function canonicalizeProviderUrl(normUrl: string): string {
  const provider = getProviderFromNormalizedUrl(normUrl)
  if (!provider) return normUrl

  if (provider === 'chaturbate') {
    const user = chaturbateUserFromUrl(normUrl)
    if (!user) return normUrl
    return `https://chaturbate.com/${encodeURIComponent(user)}/`
  }

  // provider === 'mfc' — standardize on ONE format (here: #<name>)
  const user = mfcUserFromUrl(normUrl)
  if (!user) return normUrl
  return `https://www.myfreecams.com/#${encodeURIComponent(user)}`
}
/** Returns the "model key" (lowercased) extracted from a URL, for both providers. */
function providerKeyLowerFromUrl(normUrl: string): string {
  const provider = getProviderFromNormalizedUrl(normUrl)
  if (!provider) return ''
  let name: string
  if (provider === 'chaturbate') {
    name = chaturbateUserFromUrl(normUrl)
  } else {
    name = mfcUserFromUrl(normUrl)
  }
  return (name || '').trim().toLowerCase()
}
/** Extracts the model name from a MyFreeCams URL; returns '' for anything else. */
function mfcUserFromUrl(normUrl: string): string {
  let parsed: URL
  try {
    parsed = new URL(normUrl)
  } catch {
    return ''
  }
  const host = parsed.hostname.replace(/^www\./i, '').toLowerCase()
  if (host !== 'myfreecams.com' && !host.endsWith('.myfreecams.com')) return ''

  // Typical MFC profile URLs:
  //   https://www.myfreecams.com/#<name>
  //   https://www.myfreecams.com/#/models/<name>
  //   https://www.myfreecams.com/<name>   (less common)
  const lastSegmentOf = (raw: string): string => {
    const segments = raw.split('/').filter(Boolean)
    return segments.length ? segments[segments.length - 1] : ''
  }

  try {
    const fragment = (parsed.hash || '').replace(/^#\/?/, '') // "#/models/foo" -> "models/foo"
    if (fragment) {
      const fromHash = lastSegmentOf(fragment)
      if (fromHash) return decodeURIComponent(fromHash).trim()
    }
    const fromPath = lastSegmentOf(parsed.pathname)
    return fromPath ? decodeURIComponent(fromPath).trim() : ''
  } catch {
    // malformed percent-encoding behaves like "no name found"
    return ''
  }
}
const baseName = (p: string) => (p || '').replaceAll('\\', '/').split('/').pop() || '' const baseName = (p: string) => (p || '').replaceAll('\\', '/').split('/').pop() || ''
function replaceBasename(fullPath: string, newBase: string) { function replaceBasename(fullPath: string, newBase: string) {
@ -232,6 +301,8 @@ export default function App() {
setCbOnlineByKeyLower({}) setCbOnlineByKeyLower({})
cbOnlineByKeyLowerRef.current = {} cbOnlineByKeyLowerRef.current = {}
startedToastByJobIdRef.current = {}
jobsInitDoneRef.current = false
setPendingWatchedRooms([]) setPendingWatchedRooms([])
setPendingAutoStartByKey({}) setPendingAutoStartByKey({})
@ -248,6 +319,50 @@ export default function App() {
const notify = useNotify() const notify = useNotify()
const notifyRef = useRef(notify)
// Dedupe for the "cookies missing" message (so silent/auto-starts don't spam).
// Holds the epoch-ms timestamp of the last toast; see showMissingCookiesMessage.
const cookieProblemLastAtRef = useRef(0)
// Heuristic: does this error message indicate an age-gate / Cloudflare /
// missing-room-HTML failure (i.e. a cookie problem rather than a real error)?
const isCookieGateError = (msg: string) => {
  const needles = [
    'altersverifikationsseite erhalten',
    'verify your age',
    'schutzseite von cloudflare erhalten',
    'just a moment',
    'kein room-html',
  ]
  const lowered = (msg || '').toLowerCase()
  return needles.some((needle) => lowered.includes(needle))
}
// Shows a "cookies missing/expired" notice. Interactive calls (silent=false)
// surface a persistent error box and open the cookie modal; silent calls
// (auto-start / queue) only emit a rate-limited toast to avoid spamming.
const showMissingCookiesMessage = (opts?: { silent?: boolean }) => {
  const silent = Boolean(opts?.silent)
  const title = 'Cookies fehlen oder sind abgelaufen'
  const body =
    'Der Recorder hat statt des Room-HTML eine Schutz-/Altersverifikationsseite erhalten. ' +
    'Bitte Cookies aktualisieren (bei Chaturbate z.B. cf_clearance + sessionId) und erneut starten.'

  // User acted explicitly: show an error box at the top and offer the cookie modal.
  if (!silent) {
    setError(`⚠️ ${title}. ${body}`)
    // Optional but helpful: open the modal right away.
    setCookieModalOpen(true)
    return
  }

  // Silent path (auto-start / queue): toast at most once per 15 seconds.
  const now = Date.now()
  if (now - cookieProblemLastAtRef.current > 15_000) {
    cookieProblemLastAtRef.current = now
    notifyRef.current?.error(title, body)
  }
}
// Keep notifyRef pointing at the latest notify() so stable callbacks
// (refs/queue workers) can toast without re-subscribing on every render.
useEffect(() => {
  notifyRef.current = notify
}, [notify])
// ✅ Perf: PerformanceMonitor erst nach initialer Render/Hydration anzeigen // ✅ Perf: PerformanceMonitor erst nach initialer Render/Hydration anzeigen
const [showPerfMon, setShowPerfMon] = useState(false) const [showPerfMon, setShowPerfMon] = useState(false)
@ -366,14 +481,38 @@ export default function App() {
useEffect(() => { useEffect(() => {
const onOpen = (ev: Event) => { const onOpen = (ev: Event) => {
const e = ev as CustomEvent<{ modelKey?: string }> const e = ev as CustomEvent<{ modelKey?: string }>
const raw = (e.detail?.modelKey ?? '').trim() const raw0 = (e.detail?.modelKey ?? '').trim()
if (!raw0) return
let k = raw.replace(/^https?:\/\//i, '') // 1) Wenn es "nur ein Key" ist (z.B. maypeach), direkt übernehmen
if (k.includes('/')) k = k.split('/').filter(Boolean).pop() || k // Heuristik: keine Spaces, keine Slashes -> sehr wahrscheinlich Key
if (k.includes(':')) k = k.split(':').pop() || k const looksLikeKey =
k = k.trim().toLowerCase() !raw0.includes(' ') &&
!raw0.includes('/') &&
!raw0.includes('\\')
if (k) setDetailsModelKey(k) if (looksLikeKey) {
const k = raw0.replace(/^@/, '').trim().toLowerCase()
if (k) setDetailsModelKey(k)
return
}
// 2) Sonst: URL/Path normalisieren + Provider-Key extrahieren
const norm0 = normalizeHttpUrl(raw0)
if (!norm0) {
// Fallback auf alte Key-Logik (falls raw sowas wie "chaturbate.com/im_jasmine" ist)
let k = raw0.replace(/^https?:\/\//i, '')
if (k.includes('/')) k = k.split('/').filter(Boolean).pop() || k
if (k.includes(':')) k = k.split(':').pop() || k
k = k.trim().toLowerCase()
if (k) setDetailsModelKey(k)
return
}
const norm = canonicalizeProviderUrl(norm0)
const keyLower = providerKeyLowerFromUrl(norm)
if (keyLower) setDetailsModelKey(keyLower)
} }
window.addEventListener('open-model-details', onOpen as any) window.addEventListener('open-model-details', onOpen as any)
@ -479,6 +618,11 @@ export default function App() {
const busyRef = useRef(false) const busyRef = useRef(false)
const cookiesRef = useRef<Record<string, string>>({}) const cookiesRef = useRef<Record<string, string>>({})
const jobsRef = useRef<RecordJob[]>([]) const jobsRef = useRef<RecordJob[]>([])
// ✅ "Job gestartet" Toast: dedupe (auch gegen SSE/polling) + initial-load suppression
const startedToastByJobIdRef = useRef<Record<string, true>>({})
const jobsInitDoneRef = useRef(false)
useEffect(() => { useEffect(() => {
busyRef.current = busy busyRef.current = busy
}, [busy]) }, [busy])
@ -493,9 +637,163 @@ export default function App() {
const pendingStartUrlRef = useRef<string | null>(null) const pendingStartUrlRef = useRef<string | null>(null)
const lastClipboardUrlRef = useRef<string>('') const lastClipboardUrlRef = useRef<string>('')
// --- START QUEUE (parallel) ---
// Upper bound on concurrent start requests; 4 is a good default, can be raised.
const START_CONCURRENCY = 4

type StartQueueItem = {
  url: string // provider URL to start recording
  silent: boolean // suppress error UI (used by auto-start paths)
  pendingKeyLower?: string // set when the item came from pendingAutoStartByKey
}

const startQueueRef = useRef<StartQueueItem[]>([]) // FIFO of pending start requests
const startInFlightRef = useRef(0) // number of starts currently running
const startQueuedSetRef = useRef<Set<string>>(new Set()) // dedupe: prevents duplicate queue entries
const pumpStartQueueScheduledRef = useRef(false) // at most one scheduled pump per tick

// Mirrors "any start in flight?" into both the busy state and busyRef.
const setBusyFromStarts = useCallback(() => {
  const v = startInFlightRef.current > 0
  setBusy(v)
  busyRef.current = v
}, [])
// Adds a start request to the queue (deduped on canonical URL) and schedules
// one queue pump per tick. Returns false only when the URL cannot be normalized.
const enqueueStart = useCallback(
  (item: StartQueueItem) => {
    const normalized = normalizeHttpUrl(item.url)
    if (!normalized) return false

    const canonical = canonicalizeProviderUrl(normalized)

    // Dedupe: never push the same URL into the queue many times over.
    const queuedSet = startQueuedSetRef.current
    if (queuedSet.has(canonical)) return true
    queuedSet.add(canonical)
    startQueueRef.current.push({ ...item, url: canonical })

    // Schedule the pump at most once per tick.
    if (!pumpStartQueueScheduledRef.current) {
      pumpStartQueueScheduledRef.current = true
      queueMicrotask(() => {
        pumpStartQueueScheduledRef.current = false
        void pumpStartQueue()
      })
    }
    return true
  },
  // pumpStartQueue is a hoisted function declaration defined just below, so the
  // empty deps array is fine even though eslint may complain about it.
  []
)
// Issues a single recording-start request for normUrl (canonicalized first).
// Returns true on success OR when an identical job is already running; false on error.
// With silent=true no error UI is shown (auto-start / queue paths).
async function doStartNow(normUrl: string, silent: boolean): Promise<boolean> {
  normUrl = canonicalizeProviderUrl(normUrl)

  // Duplicate-running guard: treat "this URL is already being recorded" as success.
  const alreadyRunning = jobsRef.current.some((j) => {
    if (String(j.status || '').toLowerCase() !== 'running') return false
    // endedAt present => recording finished (postwork/queue) -> don't block a new start
    if ((j as any).endedAt) return false
    const jNorm0 = normalizeHttpUrl(String((j as any).sourceUrl || ''))
    const jNorm = jNorm0 ? canonicalizeProviderUrl(jNorm0) : ''
    return jNorm === normUrl
  })
  if (alreadyRunning) return true

  try {
    const currentCookies = cookiesRef.current
    const provider = getProviderFromNormalizedUrl(normUrl)
    if (!provider) {
      if (!silent) setError('Nur chaturbate.com oder myfreecams.com werden unterstützt.')
      return false
    }
    // Chaturbate requires cf_clearance + sessionId cookies (see gate check below).
    if (provider === 'chaturbate' && !hasRequiredChaturbateCookies(currentCookies)) {
      if (!silent) setError('Für Chaturbate müssen die Cookies "cf_clearance" und "sessionId" gesetzt sein.')
      return false
    }

    // Serialize cookies into a single "k=v; k=v" header value for the backend.
    const cookieString = Object.entries(currentCookies)
      .map(([k, v]) => `${k}=${v}`)
      .join('; ')

    const created = await apiJSON<RecordJob>('/api/record', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ url: normUrl, cookie: cookieString }),
    })

    // Mark as "start toast already handled" so the subsequent SSE/polling
    // update for this job id does not toast a second time.
    if (created?.id) startedToastByJobIdRef.current[String(created.id)] = true

    // Update the UI immediately (optional; SSE/polling would catch up anyway).
    setJobs((prev) => [created, ...prev])
    jobsRef.current = [created, ...jobsRef.current]

    return true
  } catch (e: any) {
    const msg = e?.message ?? String(e)
    // Special case: age gate / Cloudflare / no room HTML => point user at cookies.
    if (isCookieGateError(msg)) {
      showMissingCookiesMessage({ silent })
      return false
    }
    if (!silent) setError(msg)
    return false
  }
}
// Drains the start queue: launches up to START_CONCURRENCY starts in parallel.
// Each worker re-pumps when it finishes, so the queue keeps draining until empty.
async function pumpStartQueue(): Promise<void> {
  // Launch as many parallel starts as the concurrency budget allows.
  while (startInFlightRef.current < START_CONCURRENCY && startQueueRef.current.length > 0) {
    const next = startQueueRef.current.shift()!
    startInFlightRef.current++
    setBusyFromStarts()

    void (async () => {
      try {
        const ok = await doStartNow(next.url, next.silent)
        // Item came from pendingAutoStartByKey: clear the pending entry only on success.
        if (ok && next.pendingKeyLower) {
          const kLower = next.pendingKeyLower
          setPendingAutoStartByKey((prev) => {
            const copy = { ...(prev || {}) }
            delete copy[kLower]
            pendingAutoStartByKeyRef.current = copy
            return copy
          })
        }
      } finally {
        // Release the dedupe slot so the same URL may be enqueued again later.
        startQueuedSetRef.current.delete(next.url)
        startInFlightRef.current = Math.max(0, startInFlightRef.current - 1)
        setBusyFromStarts()
        // If items remain, keep pumping.
        if (startQueueRef.current.length > 0) {
          void pumpStartQueue()
        }
      }
    })()
  }
}
// ✅ Zentraler Snapshot: username(lower) -> room // ✅ Zentraler Snapshot: username(lower) -> room
const [cbOnlineByKeyLower, setCbOnlineByKeyLower] = useState<Record<string, ChaturbateOnlineRoom>>({}) const [cbOnlineByKeyLower, setCbOnlineByKeyLower] = useState<Record<string, ChaturbateOnlineRoom>>({})
const cbOnlineByKeyLowerRef = useRef<Record<string, ChaturbateOnlineRoom>>({}) const cbOnlineByKeyLowerRef = useRef<Record<string, ChaturbateOnlineRoom>>({})
// NOTE(review): presumably the last seen "show" string per model key — confirm against the snapshot handler.
const lastCbShowByKeyLowerRef = useRef<Record<string, string>>({})
// Remembers whether a model was online at all in the previous snapshot.
const lastCbOnlineByKeyLowerRef = useRef<Record<string, true>>({})
// Prevents toast spam on the very first poll right after startup.
const cbOnlineInitDoneRef = useRef(false)
// Remembers whether a model has been online at least once since app start.
const everCbOnlineByKeyLowerRef = useRef<Record<string, true>>({})
useEffect(() => { useEffect(() => {
cbOnlineByKeyLowerRef.current = cbOnlineByKeyLower cbOnlineByKeyLowerRef.current = cbOnlineByKeyLower
}, [cbOnlineByKeyLower]) }, [cbOnlineByKeyLower])
@ -539,8 +837,9 @@ export default function App() {
// ✅ StartURL (hier habe ich den alten Online-Fetch entfernt und nur Snapshot genutzt) // ✅ StartURL (hier habe ich den alten Online-Fetch entfernt und nur Snapshot genutzt)
const startUrl = useCallback(async (rawUrl: string, opts?: { silent?: boolean }): Promise<boolean> => { const startUrl = useCallback(async (rawUrl: string, opts?: { silent?: boolean }): Promise<boolean> => {
const norm = normalizeHttpUrl(rawUrl) const norm0 = normalizeHttpUrl(rawUrl)
if (!norm) return false if (!norm0) return false
const norm = canonicalizeProviderUrl(norm0)
const silent = Boolean(opts?.silent) const silent = Boolean(opts?.silent)
if (!silent) setError(null) if (!silent) setError(null)
@ -565,7 +864,8 @@ export default function App() {
// ✅ Wenn endedAt existiert: Aufnahme ist fertig -> Postwork/Queue -> NICHT blocken // ✅ Wenn endedAt existiert: Aufnahme ist fertig -> Postwork/Queue -> NICHT blocken
if ((j as any).endedAt) return false if ((j as any).endedAt) return false
const jNorm = normalizeHttpUrl(String((j as any).sourceUrl || '')) const jNorm0 = normalizeHttpUrl(String((j as any).sourceUrl || ''))
const jNorm = jNorm0 ? canonicalizeProviderUrl(jNorm0) : ''
return jNorm === norm return jNorm === norm
}) })
if (alreadyRunning) return true if (alreadyRunning) return true
@ -621,11 +921,23 @@ export default function App() {
body: JSON.stringify({ url: norm, cookie: cookieString }), body: JSON.stringify({ url: norm, cookie: cookieString }),
}) })
// ✅ verhindert Doppel-Toast: StartUrl toastet ggf. schon selbst,
// und kurz danach kommt der Job nochmal über SSE/polling rein.
if (created?.id) startedToastByJobIdRef.current[String(created.id)] = true
setJobs((prev) => [created, ...prev]) setJobs((prev) => [created, ...prev])
jobsRef.current = [created, ...jobsRef.current] jobsRef.current = [created, ...jobsRef.current]
return true return true
} catch (e: any) { } catch (e: any) {
if (!silent) setError(e?.message ?? String(e)) const msg = e?.message ?? String(e)
// ✅ Spezialfall: Age-Gate / Cloudflare / kein Room-HTML => Cookies Hinweis
if (isCookieGateError(msg)) {
showMissingCookiesMessage({ silent })
return false
}
if (!silent) setError(msg)
return false return false
} finally { } finally {
setBusy(false) setBusy(false)
@ -849,50 +1161,122 @@ export default function App() {
if (donePage > maxPage) setDonePage(maxPage) if (donePage > maxPage) setDonePage(maxPage)
}, [doneCount, donePage]) }, [doneCount, donePage])
// jobs SSE / polling (unverändert) // jobs SSE / polling (mit "Job gestartet" Toast für Backend-Autostarts)
useEffect(() => { useEffect(() => {
if (!authed) return // ✅ WICHTIG: bei Logout alles stoppen
let cancelled = false let cancelled = false
let es: EventSource | null = null let es: EventSource | null = null
let fallbackTimer: number | null = null let fallbackTimer: number | null = null
let inFlight = false let inFlight = false
// Stops the interval-based fallback polling (used while SSE is down), if active.
const stopFallbackPolling = () => {
  if (!fallbackTimer) return
  window.clearInterval(fallbackTimer)
  fallbackTimer = null
}
const applyList = (list: any) => { const applyList = (list: any) => {
const arr = Array.isArray(list) ? (list as RecordJob[]) : [] const arr = Array.isArray(list) ? (list as RecordJob[]) : []
if (cancelled) return if (cancelled) return
// --- vorheriger Snapshot für Status-Transitions ---
const prev = jobsRef.current const prev = jobsRef.current
const prevById = new Map(prev.map((j) => [j.id, j.status])) const prevStatusById = new Map<string, string>()
for (const j of Array.isArray(prev) ? prev : []) {
const id = String((j as any)?.id ?? '')
if (!id) continue
prevStatusById.set(id, String((j as any)?.status ?? ''))
}
// ✅ 0) Initial load: KEINE Toasts, aber als "gesehen" markieren (falls du später wieder Start-Toast einführen willst)
if (!jobsInitDoneRef.current) {
const seen: Record<string, true> = {}
for (const j of arr) {
const id = String((j as any)?.id ?? '')
if (id) seen[id] = true
}
startedToastByJobIdRef.current = seen
jobsInitDoneRef.current = true
}
// ✅ Finished/Stopped/Failed Transition zählen -> Count-Hint + Asset-Bump
const terminal = new Set(['finished', 'stopped', 'failed']) const terminal = new Set(['finished', 'stopped', 'failed'])
let endedDelta = 0 let endedDelta = 0
for (const j of arr) { for (const j of arr) {
const ps = prevById.get(j.id) const id = String((j as any)?.id ?? '')
if (!ps || ps === j.status) continue if (!id) continue
const before = String(prevStatusById.get(id) ?? '').toLowerCase().trim()
const now = String((j as any)?.status ?? '').toLowerCase().trim()
if (!before || before === now) continue
// nur zählen, wenn wir "neu" in einen terminal state gehen // nur zählen, wenn wir "neu" in einen terminal state gehen
if (terminal.has(j.status) && !terminal.has(ps)) { if (terminal.has(now) && !terminal.has(before)) endedDelta++
endedDelta++
}
} }
if (endedDelta > 0) { if (endedDelta > 0) {
// ✅ Tabs/Count sofort aktualisieren auch wenn Finished-Tab nicht offen ist
window.dispatchEvent( window.dispatchEvent(
new CustomEvent('finished-downloads:count-hint', { detail: { delta: endedDelta } }) new CustomEvent('finished-downloads:count-hint', { detail: { delta: endedDelta } })
) )
// deine bestehenden Asset-Bumps (thumbnails etc.)
bumpAssetsTwice() bumpAssetsTwice()
} }
setJobs(arr) // ---- Queue-Info berechnen (Postwork-Warteschlange) ----
jobsRef.current = arr const statusLower = (j: any) => String(j?.status ?? '').toLowerCase().trim()
const isPostworkQueued = (j: any) => {
const s = statusLower(j)
return s === 'postwork' || s === 'queued_postwork' || s === 'waiting_postwork'
}
const ts = (j: any) =>
Number(
j?.endedAtMs ??
j?.endedAt ??
j?.createdAtMs ??
j?.createdAt ??
j?.startedAtMs ??
j?.startedAt ??
0
) || 0
const postworkQueue = arr
.filter(isPostworkQueued)
.slice()
.sort((a, b) => ts(a) - ts(b))
const postworkTotal = postworkQueue.length
const postworkPosById = new Map<string, number>()
for (let i = 0; i < postworkQueue.length; i++) {
const id = String((postworkQueue[i] as any)?.id ?? '')
if (id) postworkPosById.set(id, i + 1)
}
const arrWithQueue = arr.map((j: any) => {
const id = String(j?.id ?? '')
const pos = id ? postworkPosById.get(id) : undefined
if (!pos) return j
return {
...j,
postworkQueuePos: pos,
postworkQueueTotal: postworkTotal,
}
})
setJobs(arrWithQueue)
jobsRef.current = arrWithQueue
setPlayerJob((prevJob) => { setPlayerJob((prevJob) => {
if (!prevJob) return prevJob if (!prevJob) return prevJob
const updated = arr.find((j) => j.id === prevJob.id)
const updated = arrWithQueue.find((j) => j.id === prevJob.id)
if (updated) return updated if (updated) return updated
// wenn running und nicht mehr in list: player schließen, sonst stehen lassen
return prevJob.status === 'running' ? null : prevJob return prevJob.status === 'running' ? null : prevJob
}) })
} }
@ -919,7 +1303,13 @@ export default function App() {
es = new EventSource('/api/record/stream') es = new EventSource('/api/record/stream')
// ✅ wenn SSE wieder verbunden ist: Fallback-Polling stoppen
es.onopen = () => {
stopFallbackPolling()
}
const onJobs = (ev: MessageEvent) => { const onJobs = (ev: MessageEvent) => {
stopFallbackPolling() // ✅ sobald Daten kommen, Polling aus
try { try {
applyList(JSON.parse(ev.data)) applyList(JSON.parse(ev.data))
} catch {} } catch {}
@ -932,18 +1322,20 @@ export default function App() {
if (!document.hidden) void loadOnce() if (!document.hidden) void loadOnce()
} }
document.addEventListener('visibilitychange', onVis) document.addEventListener('visibilitychange', onVis)
window.addEventListener('hover', onVis)
// ❌ das hier empfehle ich rauszuwerfen, siehe Schritt C
// window.addEventListener('hover', onVis)
return () => { return () => {
cancelled = true cancelled = true
if (fallbackTimer) window.clearInterval(fallbackTimer) stopFallbackPolling()
document.removeEventListener('visibilitychange', onVis) document.removeEventListener('visibilitychange', onVis)
window.removeEventListener('hover', onVis) // window.removeEventListener('hover', onVis)
es?.removeEventListener('jobs', onJobs as any) es?.removeEventListener('jobs', onJobs as any)
es?.close() es?.close()
es = null es = null
} }
}, [bumpAssetsTwice]) }, [authed])
useEffect(() => { useEffect(() => {
if (selectedTab !== 'finished') return if (selectedTab !== 'finished') return
@ -1177,10 +1569,13 @@ export default function App() {
const handleAddToDownloads = useCallback( const handleAddToDownloads = useCallback(
async (job: RecordJob): Promise<boolean> => { async (job: RecordJob): Promise<boolean> => {
const raw = String((job as any)?.sourceUrl ?? '') const raw = String((job as any)?.sourceUrl ?? '')
const url = extractFirstUrl(raw) const url0 = extractFirstUrl(raw)
if (!url) return false if (!url0) return false
// silent=true -> keine rote Error-Box, wir geben Feedback über Checkmark/Toast const norm0 = normalizeHttpUrl(url0)
if (!norm0) return false
const url = canonicalizeProviderUrl(norm0)
const ok = await startUrl(url, { silent: true }) const ok = await startUrl(url, { silent: true })
if (!ok) { if (!ok) {
@ -1921,21 +2316,25 @@ export default function App() {
inFlight = true inFlight = true
try { try {
const text = await navigator.clipboard.readText() const text = await navigator.clipboard.readText()
const url = extractFirstUrl(text) const url0 = extractFirstUrl(text)
if (!url) return if (!url0) return
if (!getProviderFromNormalizedUrl(url)) return
const norm0 = normalizeHttpUrl(url0)
if (!norm0) return
const provider = getProviderFromNormalizedUrl(norm0)
if (!provider) return
const url = canonicalizeProviderUrl(norm0)
if (url === lastClipboardUrlRef.current) return if (url === lastClipboardUrlRef.current) return
lastClipboardUrlRef.current = url lastClipboardUrlRef.current = url
if (autoAddEnabled) setSourceUrl(url) if (autoAddEnabled) setSourceUrl(url)
if (autoStartEnabled) { if (autoStartEnabled) {
if (busyRef.current) { // ✅ immer enqueue (dedupe verhindert doppelt)
pendingStartUrlRef.current = url enqueueStart({ url, silent: false })
} else {
pendingStartUrlRef.current = null
await startUrl(url)
}
} }
} catch { } catch {
// ignore // ignore
@ -1966,15 +2365,6 @@ export default function App() {
} }
}, [autoAddEnabled, autoStartEnabled, startUrl]) }, [autoAddEnabled, autoStartEnabled, startUrl])
useEffect(() => {
if (busy) return
if (!autoStartEnabled) return
const pending = pendingStartUrlRef.current
if (!pending) return
pendingStartUrlRef.current = null
void startUrl(pending)
}, [busy, autoStartEnabled, startUrl])
useEffect(() => { useEffect(() => {
const stop = startChaturbateOnlinePolling({ const stop = startChaturbateOnlinePolling({
getModels: () => { getModels: () => {
@ -2006,7 +2396,11 @@ export default function App() {
if (!data?.enabled) { if (!data?.enabled) {
setCbOnlineByKeyLower({}) setCbOnlineByKeyLower({})
cbOnlineByKeyLowerRef.current = {} cbOnlineByKeyLowerRef.current = {}
lastCbShowByKeyLowerRef.current = {}
setPendingWatchedRooms([]) setPendingWatchedRooms([])
everCbOnlineByKeyLowerRef.current = {}
cbOnlineInitDoneRef.current = false
lastCbOnlineByKeyLowerRef.current = {}
setLastHeaderUpdateAtMs(Date.now()) setLastHeaderUpdateAtMs(Date.now())
return return
} }
@ -2020,6 +2414,97 @@ export default function App() {
setCbOnlineByKeyLower(nextSnap) setCbOnlineByKeyLower(nextSnap)
cbOnlineByKeyLowerRef.current = nextSnap cbOnlineByKeyLowerRef.current = nextSnap
// ✅ Toasts: (A) watched offline->online, (B) waiting->public, (C) online->offline->online => "wieder online"
try {
const notificationsOn = Boolean((recSettingsRef.current as any).enableNotifications ?? true)
const waiting = new Set(['private', 'away', 'hidden'])
// watched-Keys (nur Chaturbate)
const watchedSetLower = new Set(
Object.values(modelsByKeyRef.current || {})
.filter((m) => Boolean(m?.watching) && String(m?.host ?? '').toLowerCase().includes('chaturbate'))
.map((m) => String(m?.modelKey ?? '').trim().toLowerCase())
.filter(Boolean)
)
const prevShow = lastCbShowByKeyLowerRef.current || {}
const nextShowMap: Record<string, string> = { ...prevShow }
const prevOnline = lastCbOnlineByKeyLowerRef.current || {}
const isInitial = !cbOnlineInitDoneRef.current
// ✅ "war schon mal online" Snapshot (vor diesem Poll)
const everOnline = everCbOnlineByKeyLowerRef.current || {}
const nextEverOnline: Record<string, true> = { ...everOnline }
for (const [keyLower, room] of Object.entries(nextSnap)) {
const nowShow = String((room as any)?.current_show ?? '').toLowerCase().trim()
const beforeShow = String(prevShow[keyLower] ?? '').toLowerCase().trim()
const wasOnline = Boolean(prevOnline[keyLower])
const isOnline = true // weil es in nextSnap ist
const becameOnline = isOnline && !wasOnline
// ✅ war irgendwann schon mal online (vor diesem Poll)?
const hadEverBeenOnline = Boolean(everOnline[keyLower])
const name = String((room as any)?.username ?? keyLower).trim() || keyLower
const imageUrl = String((room as any)?.image_url ?? '').trim()
// immer merken: jetzt ist es online
nextEverOnline[keyLower] = true
// (B) waiting -> public => "wieder online" (höchste Priorität, damit kein Doppel-Toast)
const becamePublicFromWaiting = nowShow === 'public' && waiting.has(beforeShow)
if (becamePublicFromWaiting) {
if (notificationsOn) {
notify.info(name, 'ist wieder online.', {
imageUrl,
imageAlt: `${name} Vorschau`,
durationMs: 5500,
})
}
if (nowShow) nextShowMap[keyLower] = nowShow
continue
}
// (A/C) watched: offline -> online
if (watchedSetLower.has(keyLower) && becameOnline) {
// C: online->offline->online => "wieder online"
const cameBackFromOffline = hadEverBeenOnline
// Startup-Spam vermeiden
if (notificationsOn && !isInitial) {
notify.info(
name,
cameBackFromOffline ? 'ist wieder online.' : 'ist online.',
{
imageUrl,
imageAlt: `${name} Vorschau`,
durationMs: 5500,
}
)
}
}
if (nowShow) nextShowMap[keyLower] = nowShow
}
// Presence-Snapshot merken
const nextOnline: Record<string, true> = {}
for (const k of Object.keys(nextSnap)) nextOnline[k] = true
lastCbOnlineByKeyLowerRef.current = nextOnline
// ✅ "ever online" merken
everCbOnlineByKeyLowerRef.current = nextEverOnline
cbOnlineInitDoneRef.current = true
lastCbShowByKeyLowerRef.current = nextShowMap
} catch {
// ignore
}
// Online-Keys für Store // Online-Keys für Store
const storeKeys = chaturbateStoreKeysLowerRef.current const storeKeys = chaturbateStoreKeysLowerRef.current
const nextOnlineStore: Record<string, true> = {} const nextOnlineStore: Record<string, true> = {}
@ -2100,16 +2585,8 @@ export default function App() {
const url = pendingMap[kLower] const url = pendingMap[kLower]
if (!url) continue if (!url) continue
const ok = await startUrl(url, { silent: true }) // ✅ nicht mehr seriell awaiten, sondern in die Start-Queue
if (ok) { enqueueStart({ url, silent: true, pendingKeyLower: kLower })
// ✅ State + Ref gleichzeitig “synchron” löschen
setPendingAutoStartByKey((prev) => {
const copy = { ...(prev || {}) }
delete copy[kLower]
pendingAutoStartByKeyRef.current = copy
return copy
})
}
} }
setLastHeaderUpdateAtMs(Date.now()) setLastHeaderUpdateAtMs(Date.now())

View File

@ -144,8 +144,11 @@ async function apiJSON<T>(url: string, init?: RequestInit): Promise<T> {
return res.json() as Promise<T> return res.json() as Promise<T>
} }
function postWorkLabel(job: RecordJob): string { function postWorkLabel(
const pw = job.postWork job: RecordJob,
override?: { pos?: number; total?: number }
): string {
const pw = (job as any).postWork
if (!pw) return 'Warte auf Nacharbeiten…' if (!pw) return 'Warte auf Nacharbeiten…'
@ -158,24 +161,37 @@ function postWorkLabel(job: RecordJob): string {
} }
if (pw.state === 'queued') { if (pw.state === 'queued') {
const pos = typeof pw.position === 'number' ? pw.position : 0 // Backend-Werte (können was anderes zählen -> deshalb nur Fallback)
const waiting = typeof pw.waiting === 'number' ? pw.waiting : 0 const posServer = typeof pw.position === 'number' ? pw.position : 0
const running = typeof (pw as any).running === 'number' ? (pw as any).running : 0 const waitingServer = typeof pw.waiting === 'number' ? pw.waiting : 0
const runningServer = typeof (pw as any).running === 'number' ? (pw as any).running : 0
const totalServer = Math.max(waitingServer + runningServer, posServer)
// X = grobe Gesamtmenge (wartend + gerade laufend) const pos =
const total = Math.max(waiting + running, pos) typeof override?.pos === 'number' && Number.isFinite(override.pos) && override.pos > 0
? override.pos
: posServer
const total =
typeof override?.total === 'number' && Number.isFinite(override.total) && override.total > 0
? override.total
: totalServer
// Wunschformat: "64 / X"
return pos > 0 && total > 0 return pos > 0 && total > 0
? `Warte auf Nacharbeiten… ${pos} / ${total}` ? `Warte auf Nacharbeiten… ${pos} / ${total}`
: 'Warte auf Nacharbeiten…' : 'Warte auf Nacharbeiten…'
} }
return 'Warte auf Nacharbeiten…' return 'Warte auf Nacharbeiten…'
} }
function StatusCell({ job }: { job: RecordJob }) { function StatusCell({
job,
postworkInfo,
}: {
job: RecordJob
postworkInfo?: { pos?: number; total?: number }
}) {
const phaseRaw = String((job as any)?.phase ?? '').trim() const phaseRaw = String((job as any)?.phase ?? '').trim()
const progress = Number((job as any)?.progress ?? 0) const progress = Number((job as any)?.progress ?? 0)
@ -186,7 +202,7 @@ function StatusCell({ job }: { job: RecordJob }) {
// ✅ postwork genauer machen (wartend/running + Position) // ✅ postwork genauer machen (wartend/running + Position)
if (phase === 'postwork') { if (phase === 'postwork') {
phaseText = postWorkLabel(job) phaseText = postWorkLabel(job, postworkInfo)
} }
if (isRecording) { if (isRecording) {
@ -240,6 +256,7 @@ function DownloadsCardRow({
blurPreviews, blurPreviews,
modelsByKey, modelsByKey,
stopRequestedIds, stopRequestedIds,
postworkInfoOf,
markStopRequested, markStopRequested,
onOpenPlayer, onOpenPlayer,
onStopJob, onStopJob,
@ -252,6 +269,7 @@ function DownloadsCardRow({
blurPreviews?: boolean blurPreviews?: boolean
modelsByKey: Record<string, { favorite?: boolean; liked?: boolean | null; watching?: boolean }> modelsByKey: Record<string, { favorite?: boolean; liked?: boolean | null; watching?: boolean }>
stopRequestedIds: Record<string, true> stopRequestedIds: Record<string, true>
postworkInfoOf: (job: RecordJob) => { pos?: number; total?: number } | undefined
markStopRequested: (ids: string | string[]) => void markStopRequested: (ids: string | string[]) => void
onOpenPlayer: (job: RecordJob) => void onOpenPlayer: (job: RecordJob) => void
onStopJob: (id: string) => void onStopJob: (id: string) => void
@ -368,7 +386,7 @@ function DownloadsCardRow({
if (phaseLower === 'recording') { if (phaseLower === 'recording') {
phaseText = 'Recording läuft…' phaseText = 'Recording läuft…'
} else if (phaseLower === 'postwork') { } else if (phaseLower === 'postwork') {
phaseText = postWorkLabel(j) phaseText = postWorkLabel(j, postworkInfoOf(j))
} }
const statusText = rawStatus || 'unknown' const statusText = rawStatus || 'unknown'
@ -763,6 +781,69 @@ export default function Downloads({
return jobs.some((j) => !j.endedAt && j.status === 'running') return jobs.some((j) => !j.endedAt && j.status === 'running')
}, [jobs]) }, [jobs])
const postworkQueueInfoById = useMemo(() => {
const infoById = new Map<string, { pos: number; total: number }>()
const enqueueMsOf = (job: RecordJob): number => {
const anyJ = job as any
const pw = anyJ.postWork
return (
toMs(pw?.enqueuedAt) ||
toMs(anyJ.enqueuedAt) ||
toMs(anyJ.queuedAt) ||
toMs(anyJ.createdAt) ||
toMs(anyJ.addedAt) ||
toMs(job.endedAt) || // Postwork entsteht oft nach endedAt
toMs(job.startedAt) ||
0
)
}
// 1) alle relevanten Postwork-Jobs sammeln (queued + running)
const running: RecordJob[] = []
const queued: RecordJob[] = []
for (const j of jobs) {
const pw = (j as any)?.postWork
if (!pw) continue
const state = String(pw.state ?? '').toLowerCase()
if (state === 'running') running.push(j)
else if (state === 'queued') queued.push(j)
}
// 2) Reihenfolge stabil machen (FIFO)
running.sort((a, b) => enqueueMsOf(a) - enqueueMsOf(b))
queued.sort((a, b) => enqueueMsOf(a) - enqueueMsOf(b))
const runningCount = running.length
const total = runningCount + queued.length
// 3) Positionen setzen: running belegt "vorne", queued danach
for (let i = 0; i < queued.length; i++) {
const id = String((queued[i] as any)?.id ?? '')
if (!id) continue
infoById.set(id, { pos: runningCount + i + 1, total })
}
// optional (wenn du auch bei running "x / total" sehen willst):
// for (let i = 0; i < running.length; i++) {
// const id = String((running[i] as any)?.id ?? '')
// if (!id) continue
// infoById.set(id, { pos: i + 1, total })
// }
return infoById
}, [jobs])
const postworkInfoOf = useCallback(
(job: RecordJob) => {
const id = String((job as any)?.id ?? '')
return id ? postworkQueueInfoById.get(id) : undefined
},
[postworkQueueInfoById]
)
useEffect(() => { useEffect(() => {
if (!hasActive) return if (!hasActive) return
const t = window.setInterval(() => setNowMs(Date.now()), 15000) const t = window.setInterval(() => setNowMs(Date.now()), 15000)
@ -954,7 +1035,7 @@ export default function Downloads({
cell: (r) => { cell: (r) => {
if (r.kind === 'job') { if (r.kind === 'job') {
const j = r.job const j = r.job
return <StatusCell job={j} /> return <StatusCell job={j} postworkInfo={postworkInfoOf(j)} />
} }
const p = r.pending const p = r.pending
@ -1073,7 +1154,7 @@ export default function Downloads({
}, },
}, },
] ]
}, [blurPreviews, markStopRequested, modelsByKey, nowMs, onStopJob, onToggleFavorite, onToggleLike, onToggleWatch, stopRequestedIds, stopInitiatedIds]) }, [blurPreviews, markStopRequested, modelsByKey, nowMs, onStopJob, onToggleFavorite, onToggleLike, onToggleWatch, stopRequestedIds, stopInitiatedIds, postworkInfoOf])
const downloadJobRows = useMemo<DownloadRow[]>(() => { const downloadJobRows = useMemo<DownloadRow[]>(() => {
const list = jobs const list = jobs
@ -1197,6 +1278,7 @@ export default function Downloads({
nowMs={nowMs} nowMs={nowMs}
blurPreviews={blurPreviews} blurPreviews={blurPreviews}
modelsByKey={modelsByKey} modelsByKey={modelsByKey}
postworkInfoOf={postworkInfoOf}
stopRequestedIds={stopRequestedIds} stopRequestedIds={stopRequestedIds}
markStopRequested={markStopRequested} markStopRequested={markStopRequested}
onOpenPlayer={onOpenPlayer} onOpenPlayer={onOpenPlayer}
@ -1221,6 +1303,7 @@ export default function Downloads({
nowMs={nowMs} nowMs={nowMs}
blurPreviews={blurPreviews} blurPreviews={blurPreviews}
modelsByKey={modelsByKey} modelsByKey={modelsByKey}
postworkInfoOf={postworkInfoOf}
stopRequestedIds={stopRequestedIds} stopRequestedIds={stopRequestedIds}
markStopRequested={markStopRequested} markStopRequested={markStopRequested}
onOpenPlayer={onOpenPlayer} onOpenPlayer={onOpenPlayer}
@ -1245,6 +1328,7 @@ export default function Downloads({
nowMs={nowMs} nowMs={nowMs}
blurPreviews={blurPreviews} blurPreviews={blurPreviews}
modelsByKey={modelsByKey} modelsByKey={modelsByKey}
postworkInfoOf={postworkInfoOf}
stopRequestedIds={stopRequestedIds} stopRequestedIds={stopRequestedIds}
markStopRequested={markStopRequested} markStopRequested={markStopRequested}
onOpenPlayer={onOpenPlayer} onOpenPlayer={onOpenPlayer}

View File

@ -699,6 +699,9 @@ export default function FinishedDownloads({
// neben deletedKeys / deletingKeys // neben deletedKeys / deletingKeys
const [removingKeys, setRemovingKeys] = React.useState<Set<string>>(() => new Set()) const [removingKeys, setRemovingKeys] = React.useState<Set<string>>(() => new Set())
// ⏱️ Timer pro Key, damit wir Optimistik bei Fehler sauber zurückrollen können
const removeTimersRef = React.useRef<Map<string, number>>(new Map())
const markRemoving = useCallback((key: string, value: boolean) => { const markRemoving = useCallback((key: string, value: boolean) => {
setRemovingKeys((prev) => { setRemovingKeys((prev) => {
const next = new Set(prev) const next = new Set(prev)
@ -708,21 +711,65 @@ export default function FinishedDownloads({
}) })
}, []) }, [])
const cancelRemoveTimer = useCallback((key: string) => {
const t = removeTimersRef.current.get(key)
if (t != null) {
window.clearTimeout(t)
removeTimersRef.current.delete(key)
}
}, [])
const restoreRow = useCallback(
(key: string) => {
// Timer stoppen (falls die "commit delete"-Phase noch aussteht)
cancelRemoveTimer(key)
// wieder sichtbar machen
setDeletedKeys((prev) => {
const next = new Set(prev)
next.delete(key)
return next
})
setRemovingKeys((prev) => {
const next = new Set(prev)
next.delete(key)
return next
})
setDeletingKeys((prev) => {
const next = new Set(prev)
next.delete(key)
return next
})
setKeepingKeys((prev) => {
const next = new Set(prev)
next.delete(key)
return next
})
},
[cancelRemoveTimer]
)
const animateRemove = useCallback( const animateRemove = useCallback(
(key: string) => { (key: string) => {
// 1) rot + fade-out starten // 1) rot + fade-out starten
markRemoving(key, true) markRemoving(key, true)
// ggf. alten Timer entfernen (wenn mehrfach getriggert)
cancelRemoveTimer(key)
// 2) nach der Animation wirklich ausblenden + Seite auffüllen // 2) nach der Animation wirklich ausblenden + Seite auffüllen
window.setTimeout(() => { const t = window.setTimeout(() => {
removeTimersRef.current.delete(key)
markDeleted(key) markDeleted(key)
markRemoving(key, false) markRemoving(key, false)
// ✅ wichtig: Seite sofort neu laden -> Item rückt nach
queueRefill() queueRefill()
}, 320) }, 320)
removeTimersRef.current.set(key, t)
}, },
[markDeleted, markRemoving, queueRefill] [markDeleted, markRemoving, queueRefill, cancelRemoveTimer]
) )
const releasePlayingFile = useCallback( const releasePlayingFile = useCallback(
@ -795,6 +842,9 @@ export default function FinishedDownloads({
return true return true
} catch (e: any) { } catch (e: any) {
// ✅ falls irgendwo (z.B. via External-Event) schon optimistisch entfernt wurde: zurückrollen
restoreRow(key)
notify.error('Löschen fehlgeschlagen', String(e?.message || e)) notify.error('Löschen fehlgeschlagen', String(e?.message || e))
return false return false
} finally { } finally {
@ -1060,30 +1110,32 @@ export default function FinishedDownloads({
if (detail.phase === 'start') { if (detail.phase === 'start') {
markDeleting(key, true) markDeleting(key, true)
// ✅ wenn Cards-View: Swipe schon beim Start raus (ohne Aktion, weil App die API schon macht) // ✅ Optimistik: überall gleich -> animiert raus
if (view === 'cards') { animateRemove(key)
window.setTimeout(() => {
markDeleted(key)
}, 320)
} else {
animateRemove(key)
}
} else if (detail.phase === 'error') {
markDeleting(key, false)
// ✅ Swipe zurück, falls Delete fehlgeschlagen return
if (view === 'cards') { }
swipeRefs.current.get(key)?.reset()
} if (detail.phase === 'error') {
} else if (detail.phase === 'success') { // ✅ alles zurückrollen -> wieder sichtbar
restoreRow(key)
// ✅ Swipe zurück (nur Cards relevant, schadet sonst aber nicht)
swipeRefs.current.get(key)?.reset()
return
}
if (detail.phase === 'success') {
// delete final bestätigt
markDeleting(key, false) markDeleting(key, false)
queueRefill() queueRefill()
return
} }
} }
window.addEventListener('finished-downloads:delete', onExternalDelete as EventListener) window.addEventListener('finished-downloads:delete', onExternalDelete as EventListener)
return () => window.removeEventListener('finished-downloads:delete', onExternalDelete as EventListener) return () => window.removeEventListener('finished-downloads:delete', onExternalDelete as EventListener)
}, [animateRemove, markDeleting, markDeleted, view, queueRefill]) }, [animateRemove, markDeleting, queueRefill, restoreRow])
useEffect(() => { useEffect(() => {
const onExternalRename = (ev: Event) => { const onExternalRename = (ev: Event) => {

View File

@ -250,14 +250,15 @@ export default function LoginPage({ onLoggedIn }: Props) {
<div className="space-y-1"> <div className="space-y-1">
<label htmlFor="totp" className="text-xs font-medium text-gray-700 dark:text-gray-200">2FA Code</label> <label htmlFor="totp" className="text-xs font-medium text-gray-700 dark:text-gray-200">2FA Code</label>
<input <input
id="totp" id="id_code"
name="totp" name="code"
aria-label="totp" aria-label="totp"
type="text" type="text"
value={code} value={code}
onChange={(e) => setCode(e.target.value)} onChange={(e) => setCode(e.target.value)}
onKeyDown={onEnter} onKeyDown={onEnter}
autoComplete="one-time-code" autoComplete="one-time-code"
required
inputMode="numeric" inputMode="numeric"
pattern="[0-9]*" pattern="[0-9]*"
maxLength={6} maxLength={6}
@ -349,13 +350,14 @@ export default function LoginPage({ onLoggedIn }: Props) {
<label htmlFor="totp" className="text-xs font-medium text-gray-700 dark:text-gray-200">2FA Code (zum Aktivieren)</label> <label htmlFor="totp" className="text-xs font-medium text-gray-700 dark:text-gray-200">2FA Code (zum Aktivieren)</label>
<input <input
id="totp-setup" id="totp-setup"
name="totp" name="code"
aria-label="totp" aria-label="totp"
type="text" type="text"
value={code} value={code}
onChange={(e) => setCode(e.target.value)} onChange={(e) => setCode(e.target.value)}
onKeyDown={onEnter} onKeyDown={onEnter}
autoComplete="one-time-code" autoComplete="one-time-code"
required
inputMode="numeric" inputMode="numeric"
pattern="[0-9]*" pattern="[0-9]*"
maxLength={6} maxLength={6}

File diff suppressed because it is too large Load Diff

View File

@ -21,7 +21,7 @@ type RecorderSettings = {
teaserPlayback?: 'still' | 'hover' | 'all' teaserPlayback?: 'still' | 'hover' | 'all'
teaserAudio?: boolean teaserAudio?: boolean
lowDiskPauseBelowGB?: number lowDiskPauseBelowGB?: number
enableNotifications?: boolean
} }
type DiskStatus = { type DiskStatus = {
@ -47,6 +47,7 @@ const DEFAULTS: RecorderSettings = {
teaserPlayback: 'hover', teaserPlayback: 'hover',
teaserAudio: false, teaserAudio: false,
lowDiskPauseBelowGB: 5, lowDiskPauseBelowGB: 5,
enableNotifications: true,
} }
type Props = { type Props = {
@ -94,6 +95,7 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
teaserPlayback: (data as any).teaserPlayback ?? DEFAULTS.teaserPlayback, teaserPlayback: (data as any).teaserPlayback ?? DEFAULTS.teaserPlayback,
teaserAudio: (data as any).teaserAudio ?? DEFAULTS.teaserAudio, teaserAudio: (data as any).teaserAudio ?? DEFAULTS.teaserAudio,
lowDiskPauseBelowGB: (data as any).lowDiskPauseBelowGB ?? DEFAULTS.lowDiskPauseBelowGB, lowDiskPauseBelowGB: (data as any).lowDiskPauseBelowGB ?? DEFAULTS.lowDiskPauseBelowGB,
enableNotifications: (data as any).enableNotifications ?? DEFAULTS.enableNotifications,
}) })
}) })
.catch(() => { .catch(() => {
@ -186,6 +188,7 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
: DEFAULTS.teaserPlayback : DEFAULTS.teaserPlayback
const teaserAudio = !!value.teaserAudio const teaserAudio = !!value.teaserAudio
const lowDiskPauseBelowGB = Math.max(1, Math.floor(Number(value.lowDiskPauseBelowGB ?? DEFAULTS.lowDiskPauseBelowGB))) const lowDiskPauseBelowGB = Math.max(1, Math.floor(Number(value.lowDiskPauseBelowGB ?? DEFAULTS.lowDiskPauseBelowGB)))
const enableNotifications = !!value.enableNotifications
setSaving(true) setSaving(true)
try { try {
@ -206,6 +209,7 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
teaserPlayback, teaserPlayback,
teaserAudio, teaserAudio,
lowDiskPauseBelowGB, lowDiskPauseBelowGB,
enableNotifications,
}), }),
}) })
if (!res.ok) { if (!res.ok) {
@ -540,6 +544,13 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
description="Wenn aktiv, werden Vorschau/Teaser nicht stumm geschaltet." description="Wenn aktiv, werden Vorschau/Teaser nicht stumm geschaltet."
/> />
<LabeledSwitch
checked={!!value.enableNotifications}
onChange={(checked) => setValue((v) => ({ ...v, enableNotifications: checked }))}
label="Benachrichtigungen"
description="Wenn aktiv, zeigt das Frontend Toasts (z.B. wenn watched Models online/live gehen oder wenn ein queued Model wieder public wird)."
/>
<div className="rounded-xl border border-gray-200 bg-gray-50 p-3 dark:border-white/10 dark:bg-white/5"> <div className="rounded-xl border border-gray-200 bg-gray-50 p-3 dark:border-white/10 dark:bg-white/5">
<div className="flex items-start justify-between gap-3"> <div className="flex items-start justify-between gap-3">
<div> <div>

View File

@ -1,3 +1,5 @@
// frontend\src\components\ui\ToastProvider.tsx
'use client' 'use client'
import * as React from 'react' import * as React from 'react'
@ -17,6 +19,8 @@ export type Toast = {
type: ToastType type: ToastType
title?: string title?: string
message?: string message?: string
imageUrl?: string
imageAlt?: string
durationMs?: number // auto close durationMs?: number // auto close
} }
@ -82,126 +86,176 @@ export function ToastProvider({
defaultDurationMs?: number defaultDurationMs?: number
position?: 'bottom-right' | 'top-right' | 'bottom-left' | 'top-left' position?: 'bottom-right' | 'top-right' | 'bottom-left' | 'top-left'
}) { }) {
const [toasts, setToasts] = React.useState<Toast[]>([]) const [toasts, setToasts] = React.useState<Toast[]>([])
const [notificationsEnabled, setNotificationsEnabled] = React.useState(true)
const remove = React.useCallback((id: string) => { const loadNotificationSetting = React.useCallback(async () => {
setToasts((prev) => prev.filter((t) => t.id !== id)) try {
}, []) const r = await fetch('/api/settings', { cache: 'no-store' })
if (!r.ok) return
const clear = React.useCallback(() => setToasts([]), []) const data = await r.json()
setNotificationsEnabled(!!(data?.enableNotifications ?? true))
const push = React.useCallback( } catch {
(t: Omit<Toast, 'id'>) => { // ignorieren -> default true
const id = uid()
const durationMs = t.durationMs ?? defaultDurationMs
setToasts((prev) => {
const next = [{ ...t, id, durationMs }, ...prev]
return next.slice(0, Math.max(1, maxToasts))
})
if (durationMs && durationMs > 0) {
window.setTimeout(() => remove(id), durationMs)
} }
}, [])
return id React.useEffect(() => {
}, // initial laden
[defaultDurationMs, maxToasts, remove] loadNotificationSetting()
)
const ctx = React.useMemo<ToastContextValue>(() => ({ push, remove, clear }), [push, remove, clear]) // nach "Speichern" in Settings neu laden
const onUpdated = () => loadNotificationSetting()
window.addEventListener('recorder-settings-updated', onUpdated)
return () => window.removeEventListener('recorder-settings-updated', onUpdated)
}, [loadNotificationSetting])
const posCls = // optional: wenn deaktiviert, alle aktuellen Toasts ausblenden
position === 'top-right' React.useEffect(() => {
? 'items-start sm:items-start sm:justify-start' if (!notificationsEnabled) {
: position === 'top-left' // ✅ Nur nicht-Fehler ausblenden, Fehler dürfen bleiben
setToasts((prev) => prev.filter((t) => t.type === 'error'))
}
}, [notificationsEnabled])
const remove = React.useCallback((id: string) => {
setToasts((prev) => prev.filter((t) => t.id !== id))
}, [])
const clear = React.useCallback(() => setToasts([]), [])
const push = React.useCallback(
(t: Omit<Toast, 'id'>) => {
// ✅ Errors IMMER zeigen, alles andere abhängig vom Toggle
if (!notificationsEnabled && t.type !== 'error') return ''
const id = uid()
const durationMs = t.durationMs ?? defaultDurationMs
setToasts((prev) => {
const next = [{ ...t, id, durationMs }, ...prev]
return next.slice(0, Math.max(1, maxToasts))
})
if (durationMs && durationMs > 0) {
window.setTimeout(() => remove(id), durationMs)
}
return id
},
[defaultDurationMs, maxToasts, remove, notificationsEnabled]
)
const ctx = React.useMemo<ToastContextValue>(() => ({ push, remove, clear }), [push, remove, clear])
const posCls =
position === 'top-right'
? 'items-start sm:items-start sm:justify-start' ? 'items-start sm:items-start sm:justify-start'
: position === 'bottom-left' : position === 'top-left'
? 'items-end sm:items-end sm:justify-end' ? 'items-start sm:items-start sm:justify-start'
: 'items-end sm:items-end sm:justify-end' : position === 'bottom-left'
? 'items-end sm:items-end sm:justify-end'
: 'items-end sm:items-end sm:justify-end'
const alignCls = const alignCls =
position.endsWith('left') position.endsWith('left')
? 'sm:items-start' ? 'sm:items-start'
: 'sm:items-end' : 'sm:items-end'
const insetCls = const insetCls =
position.startsWith('top') position.startsWith('top')
? 'top-0 bottom-auto' ? 'top-0 bottom-auto'
: 'bottom-0 top-auto' : 'bottom-0 top-auto'
return ( return (
<ToastContext.Provider value={ctx}> <ToastContext.Provider value={ctx}>
{children} {children}
{/* Live region */} {/* Live region */}
<div <div
aria-live="assertive" aria-live="assertive"
className={[ className={[
'pointer-events-none fixed z-[80] inset-x-0', 'pointer-events-none fixed z-[80] inset-x-0',
insetCls, insetCls,
].join(' ')} ].join(' ')}
> >
<div className={['flex w-full px-4 py-6 sm:p-6', posCls].join(' ')}> <div className={['flex w-full px-4 py-6 sm:p-6', posCls].join(' ')}>
<div className={['flex w-full flex-col space-y-3', alignCls].join(' ')}> <div className={['flex w-full flex-col space-y-3', alignCls].join(' ')}>
{toasts.map((t) => { {toasts.map((t) => {
const { Icon, cls } = iconFor(t.type) const { Icon, cls } = iconFor(t.type)
const title = (t.title || '').trim() || titleDefault(t.type) const title = (t.title || '').trim() || titleDefault(t.type)
const msg = (t.message || '').trim() const msg = (t.message || '').trim()
const img = (t.imageUrl || '').trim()
const imgAlt = (t.imageAlt || title).trim()
return ( return (
<Transition key={t.id} appear show={true}> <Transition key={t.id} appear show={true}>
<div <div
className={[ className={[
'pointer-events-auto w-full max-w-sm overflow-hidden rounded-xl', 'pointer-events-auto w-full max-w-sm overflow-hidden rounded-xl',
'border bg-white/90 shadow-lg backdrop-blur', 'border bg-white/90 shadow-lg backdrop-blur',
'outline-1 outline-black/5', 'outline-1 outline-black/5',
'dark:bg-gray-950/70 dark:-outline-offset-1 dark:outline-white/10', 'dark:bg-gray-950/70 dark:-outline-offset-1 dark:outline-white/10',
borderFor(t.type), borderFor(t.type),
// animation classes (headlessui v2 data-*) // animation classes (headlessui v2 data-*)
'transition data-closed:opacity-0 data-enter:transform data-enter:duration-200 data-enter:ease-out', 'transition data-closed:opacity-0 data-enter:transform data-enter:duration-200 data-enter:ease-out',
'data-closed:data-enter:translate-y-2 sm:data-closed:data-enter:translate-y-0', 'data-closed:data-enter:translate-y-2 sm:data-closed:data-enter:translate-y-0',
position.endsWith('right') position.endsWith('right')
? 'sm:data-closed:data-enter:translate-x-2' ? 'sm:data-closed:data-enter:translate-x-2'
: 'sm:data-closed:data-enter:-translate-x-2', : 'sm:data-closed:data-enter:-translate-x-2',
].join(' ')} ].join(' ')}
> >
<div className="p-4"> <div className="p-4">
<div className="flex items-start gap-3"> <div className="flex items-start gap-3">
<div className="shrink-0"> {img ? (
<Icon className={['size-6', cls].join(' ')} aria-hidden="true" /> <div className="shrink-0">
</div> <img
src={img}
alt={imgAlt}
loading="lazy"
referrerPolicy="no-referrer"
className={[
'h-12 w-12 rounded-lg object-cover',
'ring-1 ring-black/10 dark:ring-white/10',
].join(' ')}
/>
</div>
) : (
<div className="shrink-0">
<Icon className={['size-6', cls].join(' ')} aria-hidden="true" />
</div>
)}
<div className="min-w-0 flex-1"> <div className="min-w-0 flex-1">
<p className="text-sm font-semibold text-gray-900 dark:text-white"> <p className="text-sm font-semibold text-gray-900 dark:text-white">
{title} {title}
</p>
{msg ? (
<p className="mt-1 text-sm text-gray-600 dark:text-gray-300 break-words">
{msg}
</p> </p>
) : null} {msg ? (
</div> <p className="mt-1 text-sm text-gray-600 dark:text-gray-300 break-words">
{msg}
</p>
) : null}
</div>
<button <button
type="button" type="button"
onClick={() => remove(t.id)} onClick={() => remove(t.id)}
className="shrink-0 rounded-md text-gray-400 hover:text-gray-600 focus:outline-2 focus:outline-offset-2 focus:outline-indigo-600 dark:hover:text-white dark:focus:outline-indigo-500" className="shrink-0 rounded-md text-gray-400 hover:text-gray-600 focus:outline-2 focus:outline-offset-2 focus:outline-indigo-600 dark:hover:text-white dark:focus:outline-indigo-500"
> >
<span className="sr-only">Close</span> <span className="sr-only">Close</span>
<XMarkIcon aria-hidden="true" className="size-5" /> <XMarkIcon aria-hidden="true" className="size-5" />
</button> </button>
</div>
</div> </div>
</div> </div>
</div> </Transition>
</Transition> )
) })}
})} </div>
</div> </div>
</div> </div>
</div> </ToastContext.Provider>
</ToastContext.Provider> )
)
} }
export function useToast() { export function useToast() {

View File

@ -1,3 +1,5 @@
// frontend\src\main.tsx
import { StrictMode } from 'react' import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client' import { createRoot } from 'react-dom/client'
import './index.css' import './index.css'