This commit is contained in:
Linrador 2026-02-12 11:33:21 +01:00
parent 76ea79a1a9
commit 97eafb10e7
28 changed files with 3398 additions and 1622 deletions

View File

@ -1,3 +1,5 @@
// backend\chaturbate_autostart.go
package main
import (
@ -45,6 +47,7 @@ func cookieHeaderFromSettings(s RecorderSettings) string {
if err != nil || len(m) == 0 {
return ""
}
keys := make([]string, 0, len(m))
for k := range m {
keys = append(keys, k)
@ -52,18 +55,25 @@ func cookieHeaderFromSettings(s RecorderSettings) string {
sort.Strings(keys)
var b strings.Builder
for i, k := range keys {
first := true
for _, k := range keys {
v := strings.TrimSpace(m[k])
k = strings.TrimSpace(k)
if k == "" || v == "" {
continue
}
if i > 0 {
if !first {
b.WriteString("; ")
}
first = false
b.WriteString(k)
b.WriteString("=")
b.WriteString(v)
}
return b.String()
}
@ -82,7 +92,9 @@ func resolveChaturbateURL(m WatchedModelLite) string {
// Startet watched+online(public) automatisch unabhängig vom Frontend
func startChaturbateAutoStartWorker(store *ModelStore) {
if store == nil {
if verboseLogs() {
fmt.Println("⚠️ [autostart] model store is nil")
}
return
}
@ -213,9 +225,13 @@ func startChaturbateAutoStartWorker(store *ModelStore) {
Cookie: cookieHdr,
})
if err != nil {
if verboseLogs() {
fmt.Println("❌ [autostart] start failed:", it.url, err)
}
} else {
if verboseLogs() {
fmt.Println("▶️ [autostart] started:", it.url)
}
lastStart = time.Now()
}
}

View File

@ -261,7 +261,7 @@ func startChaturbateOnlinePoller(store *ModelStore) {
fmt.Println("✅ [chaturbate] online rooms fetch recovered")
lastLoggedErr = ""
}
if len(rooms) != lastLoggedCount {
if verboseLogs() && len(rooms) != lastLoggedCount {
fmt.Println("✅ [chaturbate] online rooms:", len(rooms))
lastLoggedCount = len(rooms)
}

Binary file not shown.

Binary file not shown.

99
backend/log_policy.go Normal file
View File

@ -0,0 +1,99 @@
// backend/log_policy.go
package main
import (
"context"
"errors"
"os"
"strings"
)
// Optional: Verbose nur wenn du es explizit willst (z.B. beim Debuggen)
func verboseLogs() bool {
return os.Getenv("REC_VERBOSE") == "1"
}
// shouldLogRecordError decides whether a recording error is worth logging.
// Expected conditions (cancel/STOP, offline/private rooms, hidden-job ffmpeg
// failures) stay silent, while cookie/auth problems and hard config/IO errors
// are always logged. Matching is done on the lowercased error text, and the
// order of the checks below is significant.
func shouldLogRecordError(err error, provider string, req RecordRequest) bool {
	if err == nil {
		return false
	}
	// "STOP" / cancel is normal -> no error log
	if errors.Is(err, context.Canceled) {
		return false
	}
	msg := strings.ToLower(err.Error())
	// --- Chaturbate: ALWAYS log cookie/auth/CF problems (even for hidden jobs) ---
	if provider == "chaturbate" {
		// the explicit cookie error message
		if strings.Contains(msg, "cf_clearance") && strings.Contains(msg, "cookie") {
			return true
		}
		// typical auth/Cloudflare/blocker indicators
		// NOTE(review): the bare "cf" substring matches any message containing
		// those two letters (e.g. inside another word) — confirm this is intended.
		if strings.Contains(msg, "403") || strings.Contains(msg, "401") ||
			strings.Contains(msg, "cloudflare") || strings.Contains(msg, "cf") ||
			strings.Contains(msg, "captcha") || strings.Contains(msg, "forbidden") {
			return true
		}
	}
	// --- hard config/IO errors (always log) ---
	if strings.Contains(msg, "recorddir") ||
		strings.Contains(msg, "auflösung fehlgeschlagen") ||
		strings.Contains(msg, "permission") ||
		strings.Contains(msg, "access is denied") ||
		strings.Contains(msg, "read-only") {
		return true
	}
	// --- expected "provider/offline" situations: NEVER log ---
	// unsupported provider
	if strings.Contains(msg, "unsupported provider") {
		return false
	}
	// Chaturbate offline / parse failure / end of watched HLS segments
	if strings.Contains(msg, "kein hls") ||
		strings.Contains(msg, "room dossier") ||
		strings.Contains(msg, "keine neuen hls-segmente") ||
		strings.Contains(msg, "playlist nicht mehr erreichbar") ||
		strings.Contains(msg, "möglicherweise offline") ||
		strings.Contains(msg, "stream vermutlich offline") {
		return false
	}
	// MFC: "not public" / offline / private / does not exist
	if strings.Contains(msg, "mfc: stream wurde nicht public") ||
		strings.Contains(msg, "mfc: stream ist nicht public") ||
		strings.Contains(msg, "stream ist nicht öffentlich") ||
		strings.Contains(msg, "status: offline") ||
		strings.Contains(msg, "status: private") ||
		strings.Contains(msg, "status: notexist") {
		return false
	}
	// ffmpeg errors:
	// - hidden jobs (autostart/auto-checks) are usually "offline/short-lived" => silent
	// - manually started jobs benefit from the log => log
	if strings.Contains(msg, "ffmpeg") {
		if req.Hidden {
			return false
		}
		return true
	}
	// Default:
	// - hidden jobs should stay quiet
	// - manual jobs may log errors (excluding the "offline"/"expected" cases above)
	return !req.Hidden
}
// shouldLogRecordInfo reports whether informational record logs (e.g.
// "auto-deleted") should be emitted for the given request. Info logs are
// silent by default; set REC_VERBOSE=1 to enable them. The request is
// currently unused but kept for signature stability with shouldLogRecordError.
func shouldLogRecordInfo(req RecordRequest) bool {
	// `if verboseLogs() { return true }; return false` simplified to a
	// direct return (gofmt/staticcheck S1008 idiom).
	return verboseLogs()
}

View File

@ -39,7 +39,6 @@ import (
"github.com/grafov/m3u8"
gocpu "github.com/shirou/gopsutil/v3/cpu"
godisk "github.com/shirou/gopsutil/v3/disk"
"github.com/sqweek/dialog"
"golang.org/x/image/font"
"golang.org/x/image/font/basicfont"
"golang.org/x/image/math/fixed"
@ -69,6 +68,7 @@ type RecordJob struct {
VideoWidth int `json:"videoWidth,omitempty"`
VideoHeight int `json:"videoHeight,omitempty"`
FPS float64 `json:"fps,omitempty"`
Meta *videoMeta `json:"meta,omitempty"`
Hidden bool `json:"-"`
@ -184,6 +184,163 @@ func probeVideoProps(ctx context.Context, filePath string) (w int, h int, fps fl
return w, h, fps, nil
}
// metaJSONPathForAssetID returns the meta.json location for an asset inside
// the generated/meta tree: <root>/<assetID>/meta.json.
func metaJSONPathForAssetID(assetID string) (string, error) {
	dir, err := generatedMetaRoot()
	if err != nil {
		return "", err
	}
	if strings.TrimSpace(dir) == "" {
		return "", fmt.Errorf("generated/meta root leer")
	}
	p := filepath.Join(dir, assetID, "meta.json")
	return p, nil
}
// readVideoMetaIfValid loads meta.json from metaPath and returns it only if
// it still matches the file on disk (same size and mtime, so stale values are
// never shown) and passes minimal validation (positive duration). On any
// failure it returns (nil, false).
func readVideoMetaIfValid(metaPath string, fi os.FileInfo) (*videoMeta, bool) {
	raw, readErr := os.ReadFile(metaPath)
	if readErr != nil || len(raw) == 0 {
		return nil, false
	}
	var meta videoMeta
	if json.Unmarshal(raw, &meta) != nil {
		return nil, false
	}
	// reject stale meta: the on-disk file must be byte-identical
	if meta.FileSize != fi.Size() {
		return nil, false
	}
	if meta.FileModUnix != fi.ModTime().Unix() {
		return nil, false
	}
	// minimal validation
	if meta.DurationSeconds <= 0 {
		return nil, false
	}
	return &meta, true
}
// ensureVideoMetaForFile returns the cached meta.json for fullPath when it is
// still valid, otherwise probes the file (duration + video properties) under a
// concurrency limit and 8s timeout, then writes a fresh meta.json (best
// effort). Returns (nil, false) when no asset ID can be derived from the file
// name or probing fails.
func ensureVideoMetaForFile(ctx context.Context, fullPath string, fi os.FileInfo, sourceURL string) (*videoMeta, bool) {
	// derive the assetID from the file name (stem without extension)
	stem := strings.TrimSuffix(filepath.Base(fullPath), filepath.Ext(fullPath))
	assetID := stripHotPrefix(strings.TrimSpace(stem))
	if assetID == "" {
		return nil, false
	}
	// sanitize the same way the generated folders are named
	var err error
	assetID, err = sanitizeID(assetID)
	if err != nil || assetID == "" {
		return nil, false
	}
	metaPath, err := metaJSONPathForAssetID(assetID)
	if err != nil {
		return nil, false
	}
	// 1) is a valid (non-stale) meta already present?
	if m, ok := readVideoMetaIfValid(metaPath, fi); ok {
		return m, true
	}
	// 2) otherwise regenerate (with a concurrency limit)
	if ctx == nil {
		ctx = context.Background()
	}
	cctx, cancel := context.WithTimeout(ctx, 8*time.Second)
	defer cancel()
	if durSem != nil {
		if err := durSem.Acquire(cctx); err != nil {
			return nil, false
		}
		defer durSem.Release()
	}
	// duration (required)
	dur, derr := durationSecondsCached(cctx, fullPath)
	if derr != nil || dur <= 0 {
		return nil, false
	}
	// video properties (optional)
	w, h, fps, perr := probeVideoProps(cctx, fullPath)
	if perr != nil {
		// width/height/fps may stay 0; the duration alone is still useful
		w, h, fps = 0, 0, 0
	}
	// create the meta directory
	_ = os.MkdirAll(filepath.Dir(metaPath), 0o755)
	m := &videoMeta{
		Version:         2,
		DurationSeconds: dur,
		FileSize:        fi.Size(),
		FileModUnix:     fi.ModTime().Unix(),
		VideoWidth:      w,
		VideoHeight:     h,
		FPS:             fps,
		Resolution:      formatResolution(w, h),
		SourceURL:       strings.TrimSpace(sourceURL),
		UpdatedAtUnix:   time.Now().Unix(),
	}
	b, _ := json.MarshalIndent(m, "", " ")
	b = append(b, '\n')
	_ = atomicWriteFile(metaPath, b) // best effort
	return m, true
}
// ensureVideoMetaForFileBestEffort:
//   - first tries real generation (ffprobe/ffmpeg) via ensureVideoMetaForFile
//   - if that fails but durationSecondsCacheOnly already knows the duration:
//     writes a duration-only meta.json so future requests can read
//     "from meta.json" instead of probing again.
func ensureVideoMetaForFileBestEffort(ctx context.Context, fullPath string, sourceURL string) (*videoMeta, bool) {
	fullPath = strings.TrimSpace(fullPath)
	if fullPath == "" {
		return nil, false
	}
	fi, err := os.Stat(fullPath)
	if err != nil || fi == nil || fi.IsDir() || fi.Size() <= 0 {
		return nil, false
	}
	// 1) normal path: generate/read meta (ffprobe/ffmpeg)
	if m, ok := ensureVideoMetaForFile(ctx, fullPath, fi, sourceURL); ok && m != nil {
		return m, true
	}
	// 2) fallback: duration already in the RAM cache -> persist a
	//    duration-only meta.json
	dur := durationSecondsCacheOnly(fullPath, fi)
	if dur <= 0 {
		return nil, false
	}
	stem := strings.TrimSuffix(filepath.Base(fullPath), filepath.Ext(fullPath))
	assetID := stripHotPrefix(strings.TrimSpace(stem))
	if assetID == "" {
		return nil, false
	}
	metaPath, err := metaJSONPathForAssetID(assetID)
	if err != nil || strings.TrimSpace(metaPath) == "" {
		return nil, false
	}
	_ = os.MkdirAll(filepath.Dir(metaPath), 0o755)
	_ = writeVideoMetaDuration(metaPath, fi, dur, sourceURL)
	// re-read and validate what was just written
	if m, ok := readVideoMetaIfValid(metaPath, fi); ok && m != nil {
		return m, true
	}
	return nil, false
}
func (d *dummyResponseWriter) Header() http.Header {
if d.h == nil {
d.h = make(http.Header)
@ -1373,72 +1530,6 @@ func durationSecondsCached(ctx context.Context, path string) (float64, error) {
return sec, nil
}
// RecorderSettings holds the user-configurable recorder options that are
// persisted as JSON (see settingsFile / recordSettingsHandler).
type RecorderSettings struct {
	RecordDir               string `json:"recordDir"`
	DoneDir                 string `json:"doneDir"`
	FFmpegPath              string `json:"ffmpegPath"`
	AutoAddToDownloadList   bool   `json:"autoAddToDownloadList"`
	AutoStartAddedDownloads bool   `json:"autoStartAddedDownloads"`
	UseChaturbateAPI        bool   `json:"useChaturbateApi"`
	UseMyFreeCamsWatcher    bool   `json:"useMyFreeCamsWatcher"`
	// When enabled, finished downloads are deleted automatically if they are
	// smaller than the threshold below.
	AutoDeleteSmallDownloads        bool `json:"autoDeleteSmallDownloads"`
	AutoDeleteSmallDownloadsBelowMB int  `json:"autoDeleteSmallDownloadsBelowMB"`
	BlurPreviews                    bool   `json:"blurPreviews"`
	TeaserPlayback                  string `json:"teaserPlayback"` // still | hover | all
	TeaserAudio                     bool   `json:"teaserAudio"`    // play preview/teaser with audio
	// EncryptedCookies contains base64(nonce+ciphertext) of a JSON cookie map.
	EncryptedCookies string `json:"encryptedCookies"`
}
var (
	// settingsMu guards every read/write of the settings value below.
	settingsMu sync.Mutex
	// settings is the in-memory recorder configuration, pre-filled with defaults.
	settings = RecorderSettings{
		RecordDir:                       "/records",
		DoneDir:                         "/records/done",
		FFmpegPath:                      "",
		AutoAddToDownloadList:           false,
		AutoStartAddedDownloads:         false,
		UseChaturbateAPI:                false,
		UseMyFreeCamsWatcher:            false,
		AutoDeleteSmallDownloads:        false,
		AutoDeleteSmallDownloadsBelowMB: 50,
		BlurPreviews:                    false,
		TeaserPlayback:                  "hover",
		TeaserAudio:                     false,
		EncryptedCookies:                "",
	}
	// settingsFile is the default settings file name; settingsFilePath applies
	// the RECORDER_SETTINGS_FILE override and resolves it to a full path.
	settingsFile = "recorder_settings.json"
)
// settingsFilePath returns the resolved location of the settings file.
// The file name may be overridden via the RECORDER_SETTINGS_FILE environment
// variable; the name is then resolved relative to the exe/app directory
// (falling back to the working dir under `go run`), and returned as-is when
// resolution fails.
func settingsFilePath() string {
	name := strings.TrimSpace(os.Getenv("RECORDER_SETTINGS_FILE"))
	if name == "" {
		name = settingsFile
	}
	p, err := resolvePathRelativeToApp(name)
	if err == nil && strings.TrimSpace(p) != "" {
		return p
	}
	// fallback: return the bare name unchanged
	return name
}
// getSettings returns a snapshot copy of the current recorder settings,
// taken under the settings mutex.
func getSettings() RecorderSettings {
	settingsMu.Lock()
	snapshot := settings
	settingsMu.Unlock()
	return snapshot
}
func detectFFmpegPath() string {
// 0. Settings-Override (ffmpegPath in recorder_settings.json / UI)
s := getSettings()
@ -1569,246 +1660,6 @@ func renameGenerated(oldID, newID string) {
}
}
// loadSettings reads the settings file (if present) into the in-memory
// settings, normalizes and clamps the loaded values, ensures the record/done
// directories exist, and re-detects the ffmpeg/ffprobe binaries. Called at
// startup; a missing or unparsable file silently keeps the defaults.
func loadSettings() {
	p := settingsFilePath()
	b, err := os.ReadFile(p)
	fmt.Println("🔧 settingsFile:", p)
	if err == nil {
		s := getSettings() // start from the current defaults
		if json.Unmarshal(b, &s) == nil {
			if strings.TrimSpace(s.RecordDir) != "" {
				s.RecordDir = filepath.Clean(strings.TrimSpace(s.RecordDir))
			}
			if strings.TrimSpace(s.DoneDir) != "" {
				s.DoneDir = filepath.Clean(strings.TrimSpace(s.DoneDir))
			}
			if strings.TrimSpace(s.FFmpegPath) != "" {
				s.FFmpegPath = strings.TrimSpace(s.FFmpegPath)
			}
			// TeaserPlayback must be one of still|hover|all; default to "hover"
			s.TeaserPlayback = strings.ToLower(strings.TrimSpace(s.TeaserPlayback))
			if s.TeaserPlayback == "" {
				s.TeaserPlayback = "hover"
			}
			if s.TeaserPlayback != "still" && s.TeaserPlayback != "hover" && s.TeaserPlayback != "all" {
				s.TeaserPlayback = "hover"
			}
			// auto-delete threshold: clamp to [0, 100000] MB
			if s.AutoDeleteSmallDownloadsBelowMB < 0 {
				s.AutoDeleteSmallDownloadsBelowMB = 0
			}
			if s.AutoDeleteSmallDownloadsBelowMB > 100_000 {
				s.AutoDeleteSmallDownloadsBelowMB = 100_000
			}
			settingsMu.Lock()
			settings = s
			settingsMu.Unlock()
		}
	}
	// make sure the configured folders exist
	s := getSettings()
	recordAbs, _ := resolvePathRelativeToApp(s.RecordDir)
	doneAbs, _ := resolvePathRelativeToApp(s.DoneDir)
	if strings.TrimSpace(recordAbs) != "" {
		_ = os.MkdirAll(recordAbs, 0o755)
	}
	if strings.TrimSpace(doneAbs) != "" {
		_ = os.MkdirAll(doneAbs, 0o755)
	}
	// determine the ffmpeg/ffprobe paths from settings/env/PATH
	ffmpegPath = detectFFmpegPath()
	fmt.Println("🔍 ffmpegPath:", ffmpegPath)
	ffprobePath = detectFFprobePath()
	fmt.Println("🔍 ffprobePath:", ffprobePath)
}
// saveSettingsToDisk serializes the current settings as indented JSON and
// atomically writes them to the settings file. Failures are only logged —
// this is a best-effort persistence step.
func saveSettingsToDisk() {
	current := getSettings()
	payload, err := json.MarshalIndent(current, "", " ")
	if err != nil {
		fmt.Println("⚠️ settings marshal:", err)
		return
	}
	payload = append(payload, '\n')
	target := settingsFilePath()
	if werr := atomicWriteFile(target, payload); werr != nil {
		fmt.Println("⚠️ settings write:", werr)
	}
}
// recordSettingsHandler serves the recorder settings API:
// GET returns the current settings as JSON; POST validates, normalizes,
// persists, and applies new settings (including re-detecting ffmpeg/ffprobe).
// Other methods get 405.
func recordSettingsHandler(w http.ResponseWriter, r *http.Request) {
	switch r.Method {
	case http.MethodGet:
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("Cache-Control", "no-store")
		_ = json.NewEncoder(w).Encode(getSettings())
		return
	case http.MethodPost:
		var in RecorderSettings
		if err := json.NewDecoder(r.Body).Decode(&in); err != nil {
			http.Error(w, "invalid json: "+err.Error(), http.StatusBadRequest)
			return
		}
		// --- normalize (IMPORTANT: trim first, then empty-check, then clean) ---
		recRaw := strings.TrimSpace(in.RecordDir)
		doneRaw := strings.TrimSpace(in.DoneDir)
		if recRaw == "" || doneRaw == "" {
			http.Error(w, "recordDir und doneDir dürfen nicht leer sein", http.StatusBadRequest)
			return
		}
		in.RecordDir = filepath.Clean(recRaw)
		in.DoneDir = filepath.Clean(doneRaw)
		// optional but strongly recommended: forbid "."
		if in.RecordDir == "." || in.DoneDir == "." {
			http.Error(w, "recordDir/doneDir dürfen nicht '.' sein", http.StatusBadRequest)
			return
		}
		in.FFmpegPath = strings.TrimSpace(in.FFmpegPath)
		// TeaserPlayback must be one of still|hover|all; default to "hover"
		in.TeaserPlayback = strings.ToLower(strings.TrimSpace(in.TeaserPlayback))
		if in.TeaserPlayback == "" {
			in.TeaserPlayback = "hover"
		}
		if in.TeaserPlayback != "still" && in.TeaserPlayback != "hover" && in.TeaserPlayback != "all" {
			in.TeaserPlayback = "hover"
		}
		// auto-delete threshold: clamp to [0, 100000] MB
		if in.AutoDeleteSmallDownloadsBelowMB < 0 {
			in.AutoDeleteSmallDownloadsBelowMB = 0
		}
		if in.AutoDeleteSmallDownloadsBelowMB > 100_000 {
			in.AutoDeleteSmallDownloadsBelowMB = 100_000
		}
		// --- ensure folders (return the error, e.g. when permissions are missing) ---
		recAbs, err := resolvePathRelativeToApp(in.RecordDir)
		if err != nil {
			http.Error(w, "ungültiger recordDir: "+err.Error(), http.StatusBadRequest)
			return
		}
		doneAbs, err := resolvePathRelativeToApp(in.DoneDir)
		if err != nil {
			http.Error(w, "ungültiger doneDir: "+err.Error(), http.StatusBadRequest)
			return
		}
		if err := os.MkdirAll(recAbs, 0o755); err != nil {
			http.Error(w, "konnte recordDir nicht erstellen: "+err.Error(), http.StatusBadRequest)
			return
		}
		if err := os.MkdirAll(doneAbs, 0o755); err != nil {
			http.Error(w, "konnte doneDir nicht erstellen: "+err.Error(), http.StatusBadRequest)
			return
		}
		// update the in-memory settings
		settingsMu.Lock()
		settings = in
		settingsMu.Unlock()
		// persist the settings to disk
		saveSettingsToDisk()
		// re-determine ffmpeg/ffprobe after the change;
		// if the user set FFmpegPath explicitly, use that directly.
		if strings.TrimSpace(in.FFmpegPath) != "" {
			ffmpegPath = in.FFmpegPath
		} else {
			ffmpegPath = detectFFmpegPath()
		}
		fmt.Println("🔍 ffmpegPath:", ffmpegPath)
		ffprobePath = detectFFprobePath()
		fmt.Println("🔍 ffprobePath:", ffprobePath)
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("Cache-Control", "no-store")
		_ = json.NewEncoder(w).Encode(getSettings())
		return
	default:
		http.Error(w, "Nur GET/POST erlaubt", http.StatusMethodNotAllowed)
		return
	}
}
// settingsBrowse opens a native picker for the requested target
// ("record"/"done" -> directory chooser, "ffmpeg" -> file chooser for
// ffmpeg.exe) and returns the selected path as JSON. A cancelled dialog
// yields 204 No Content so the frontend can ignore it.
func settingsBrowse(w http.ResponseWriter, r *http.Request) {
	target := r.URL.Query().Get("target")
	if target != "record" && target != "done" && target != "ffmpeg" {
		http.Error(w, "target muss record, done oder ffmpeg sein", http.StatusBadRequest)
		return
	}
	var (
		p   string
		err error
	)
	if target == "ffmpeg" {
		// file picker for ffmpeg.exe
		p, err = dialog.File().
			Title("ffmpeg.exe auswählen").
			Load()
	} else {
		// directory picker for record/done
		p, err = dialog.Directory().
			Title("Ordner auswählen").
			Browse()
	}
	if err != nil {
		// user cancelled -> 204 No Content is convenient for the frontend
		if strings.Contains(strings.ToLower(err.Error()), "cancel") {
			w.WriteHeader(http.StatusNoContent)
			return
		}
		http.Error(w, "auswahl fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// optional: if inside the exe dir, return it as a RELATIVE path
	p = maybeMakeRelativeToExe(p)
	w.Header().Set("Content-Type", "application/json")
	_ = json.NewEncoder(w).Encode(map[string]string{"path": p})
}
func maybeMakeRelativeToExe(abs string) string {
exe, err := os.Executable()
if err != nil {
return abs
}
base := filepath.Dir(exe)
rel, err := filepath.Rel(base, abs)
if err != nil {
return abs
}
// wenn rel mit ".." beginnt -> nicht innerhalb base -> absoluten Pfad behalten
if rel == "." || rel == ".." || strings.HasPrefix(rel, ".."+string(os.PathSeparator)) {
return abs
}
return filepath.ToSlash(rel) // frontend-freundlich
}
// --- Shared status values for MFC ---

// Status is an integer status code shared by the MFC handling.
type Status int

Binary file not shown.

View File

@ -3,12 +3,13 @@
package main
import (
"bytes"
"context"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"net"
"net/http"
"net/url"
"os"
@ -21,6 +22,7 @@ import (
"strings"
"sync"
"sync/atomic"
"syscall"
"time"
)
@ -209,45 +211,177 @@ func startRecordingFromRequest(w http.ResponseWriter, r *http.Request) {
_ = json.NewEncoder(w).Encode(job)
}
func recordVideo(w http.ResponseWriter, r *http.Request) {
// ---- track if headers/body were already written ----
// (Go methods must be at package scope)
type rwTrack struct {
http.ResponseWriter
wrote bool
}
func (t *rwTrack) WriteHeader(statusCode int) {
if t.wrote {
return
}
t.wrote = true
t.ResponseWriter.WriteHeader(statusCode)
}
func (t *rwTrack) Write(p []byte) (int, error) {
if !t.wrote {
t.wrote = true
}
return t.ResponseWriter.Write(p)
}
func recordVideo(w http.ResponseWriter, r *http.Request) {
// ---- wrap writer to detect "already wrote" ----
tw := &rwTrack{ResponseWriter: w}
w = tw
writeErr := func(code int, msg string) {
// Wenn schon Header/Body raus sind, dürfen wir KEIN http.Error mehr machen,
// sonst gibt's "superfluous response.WriteHeader".
if tw.wrote {
fmt.Println("[recordVideo] late error (headers already sent):", code, msg)
return
}
http.Error(w, msg, code) // nutzt WriteHeader+Write -> tw.wrote wird automatisch true
}
writeStatus := func(code int) {
if tw.wrote {
return
}
w.WriteHeader(code) // geht durch rwTrack.WriteHeader
}
// ---- CORS ----
origin := r.Header.Get("Origin")
if origin != "" {
// ✅ dev origin erlauben (oder "*" wenns dir egal ist)
w.Header().Set("Access-Control-Allow-Origin", origin)
w.Header().Set("Vary", "Origin")
w.Header().Set("Access-Control-Allow-Methods", "GET,HEAD,OPTIONS")
w.Header().Set("Access-Control-Allow-Headers", "Range")
w.Header().Set("Access-Control-Expose-Headers", "Content-Length, Content-Range, Accept-Ranges")
// Wichtig: Browser schicken bei Video-Range-Requests oft If-Range / If-Modified-Since / If-None-Match.
// Wenn du die nicht erlaubst, schlägt der Preflight fehl -> VideoJS sieht "NETWORK error".
w.Header().Set("Access-Control-Allow-Headers", "Range, If-Range, If-Modified-Since, If-None-Match")
w.Header().Set("Access-Control-Expose-Headers", "Content-Length, Content-Range, Accept-Ranges, ETag, Last-Modified, X-Transcode-Offset-Seconds")
w.Header().Set("Access-Control-Allow-Credentials", "true")
}
if r.Method == http.MethodOptions {
w.WriteHeader(http.StatusNoContent)
writeStatus(http.StatusNoContent)
return
}
// ✅ einmal lesen (für beide Zweige) + normalisieren
q := strings.TrimSpace(r.URL.Query().Get("quality"))
if strings.EqualFold(q, "auto") {
q = ""
// ---- query normalize ----
// Neu: resolution=LOW|MEDIUM|HIGH|ORIGINAL
res := strings.TrimSpace(r.URL.Query().Get("resolution"))
// Backwards-Compat: falls altes Frontend noch quality nutzt
if res == "" {
res = strings.TrimSpace(r.URL.Query().Get("quality"))
}
if q != "" {
// früh validieren (liefert sauberen 400 statt später 500)
if _, ok := profileFromQuality(q); !ok {
http.Error(w, "ungültige quality", http.StatusBadRequest)
// Normalize: auto/original => leer (== "ORIGINAL" Profil)
if strings.EqualFold(res, "auto") || strings.EqualFold(res, "original") {
res = ""
}
// Validieren (wenn gesetzt)
if res != "" {
if _, ok := profileFromResolution(res); !ok {
writeErr(http.StatusBadRequest, "ungültige resolution")
return
}
}
fmt.Println("[recordVideo] quality="+q, "file="+r.URL.Query().Get("file"), "id="+r.URL.Query().Get("id"))
rawProgress := strings.TrimSpace(r.URL.Query().Get("progress"))
if rawProgress == "" {
rawProgress = strings.TrimSpace(r.URL.Query().Get("p"))
}
// ✅ Wiedergabe über Dateiname (für doneDir / recordDir)
if raw := strings.TrimSpace(r.URL.Query().Get("file")); raw != "" {
// explizit decoden (zur Sicherheit)
file, err := url.QueryUnescape(raw)
// ---- startSec parse (seek position in seconds) ----
startSec := 0
startFrac := -1.0 // wenn 0..1 => Progress-Fraction (currentProgress)
raw := strings.TrimSpace(r.URL.Query().Get("start"))
if raw == "" {
raw = strings.TrimSpace(r.URL.Query().Get("t"))
}
parseFracOrSeconds := func(s string) {
s = strings.TrimSpace(s)
if s == "" {
return
}
// allow "hh:mm:ss" / "mm:ss"
if strings.Contains(s, ":") {
parts := strings.Split(s, ":")
ok := true
vals := make([]int, 0, len(parts))
for _, p := range parts {
p = strings.TrimSpace(p)
n, err := strconv.Atoi(p)
if err != nil || n < 0 {
ok = false
break
}
vals = append(vals, n)
}
if ok {
if len(vals) == 2 {
startSec = vals[0]*60 + vals[1]
return
} else if len(vals) == 3 {
startSec = vals[0]*3600 + vals[1]*60 + vals[2]
return
}
}
return
}
// number: seconds OR fraction
f, err := strconv.ParseFloat(s, 64)
if err != nil {
http.Error(w, "ungültiger file", http.StatusBadRequest)
return
}
if f <= 0 {
return
}
// < 1.0 => treat as fraction (currentProgress)
if f > 0 && f < 1.0 {
startFrac = f
return
}
// >= 1.0 => treat as seconds (floor)
startSec = int(f)
}
parseFracOrSeconds(raw)
// optional explicit progress overrides fraction
if rawProgress != "" {
f, err := strconv.ParseFloat(strings.TrimSpace(rawProgress), 64)
if err == nil && f > 0 && f < 1.0 {
startFrac = f
}
}
if startSec < 0 {
startSec = 0
}
// ---- resolve outPath from file or id ----
resolveOutPath := func() (string, bool) {
// ✅ Wiedergabe über Dateiname (für doneDir / recordDir)
if rawFile := strings.TrimSpace(r.URL.Query().Get("file")); rawFile != "" {
file, err := url.QueryUnescape(rawFile)
if err != nil {
writeErr(http.StatusBadRequest, "ungültiger file")
return "", false
}
file = strings.TrimSpace(file)
// kein Pfad, keine Backslashes, kein Traversal
@ -255,80 +389,136 @@ func recordVideo(w http.ResponseWriter, r *http.Request) {
strings.Contains(file, "/") ||
strings.Contains(file, "\\") ||
filepath.Base(file) != file {
http.Error(w, "ungültiger file", http.StatusBadRequest)
return
writeErr(http.StatusBadRequest, "ungültiger file")
return "", false
}
ext := strings.ToLower(filepath.Ext(file))
if ext != ".mp4" && ext != ".ts" {
http.Error(w, "nicht erlaubt", http.StatusForbidden)
return
writeErr(http.StatusForbidden, "nicht erlaubt")
return "", false
}
s := getSettings()
recordAbs, err := resolvePathRelativeToApp(s.RecordDir)
if err != nil {
http.Error(w, "recordDir auflösung fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return
writeErr(http.StatusInternalServerError, "recordDir auflösung fehlgeschlagen: "+err.Error())
return "", false
}
doneAbs, err := resolvePathRelativeToApp(s.DoneDir)
if err != nil {
http.Error(w, "doneDir auflösung fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return
writeErr(http.StatusInternalServerError, "doneDir auflösung fehlgeschlagen: "+err.Error())
return "", false
}
// Kandidaten: erst done (inkl. 1 Level Subdir, aber ohne "keep"),
// dann keep (inkl. 1 Level Subdir), dann recordDir
names := []string{file}
// Falls UI noch ".ts" kennt, die Datei aber schon als ".mp4" existiert:
if ext == ".ts" {
mp4File := strings.TrimSuffix(file, ext) + ".mp4"
names = append(names, mp4File)
names = append(names, strings.TrimSuffix(file, ext)+".mp4")
}
var outPath string
for _, name := range names {
// done root + done/<subdir>/ (skip "keep")
if p, _, ok := findFileInDirOrOneLevelSubdirs(doneAbs, name, "keep"); ok {
outPath = p
break
}
// keep root + keep/<subdir>/
if p, _, ok := findFileInDirOrOneLevelSubdirs(filepath.Join(doneAbs, "keep"), name, ""); ok {
outPath = p
break
}
// record root (+ optional 1 Level Subdir)
if p, _, ok := findFileInDirOrOneLevelSubdirs(recordAbs, name, ""); ok {
outPath = p
break
}
}
if outPath == "" {
http.Error(w, "datei nicht gefunden", http.StatusNotFound)
writeErr(http.StatusNotFound, "datei nicht gefunden")
return "", false
}
return filepath.Clean(strings.TrimSpace(outPath)), true
}
// ✅ ALT: Wiedergabe über Job-ID (funktioniert nur solange Job im RAM existiert)
id := strings.TrimSpace(r.URL.Query().Get("id"))
if id == "" {
writeErr(http.StatusBadRequest, "id fehlt")
return "", false
}
jobsMu.Lock()
job, ok := jobs[id]
jobsMu.Unlock()
if !ok {
writeErr(http.StatusNotFound, "job nicht gefunden")
return "", false
}
outPath := filepath.Clean(strings.TrimSpace(job.Output))
if outPath == "" {
writeErr(http.StatusNotFound, "output fehlt")
return "", false
}
if !filepath.IsAbs(outPath) {
abs, err := resolvePathRelativeToApp(outPath)
if err != nil {
writeErr(http.StatusInternalServerError, "pfad auflösung fehlgeschlagen: "+err.Error())
return "", false
}
outPath = abs
}
fi, err := os.Stat(outPath)
if err != nil || fi.IsDir() || fi.Size() == 0 {
writeErr(http.StatusNotFound, "datei nicht gefunden")
return "", false
}
return outPath, true
}
outPath, ok := resolveOutPath()
if !ok {
return
}
outPath = filepath.Clean(strings.TrimSpace(outPath))
// 1) ✅ TS -> MP4 (on-demand remux)
// ---- convert progress fraction to seconds (if needed) ----
if startSec == 0 && startFrac > 0 && startFrac < 1.0 {
// ffprobe duration (cached)
if err := ensureFFprobeAvailable(); err == nil {
pctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
dur, derr := getVideoDurationSecondsCached(pctx, outPath)
cancel()
if derr == nil && dur > 0 {
startSec = int(startFrac * dur)
}
}
}
// sanitize + optional bucket align (wie bei GOP-ish seeking)
if startSec < 0 {
startSec = 0
}
startSec = (startSec / 2) * 2
// ---- TS -> MP4 (on-demand remux) ----
if strings.ToLower(filepath.Ext(outPath)) == ".ts" {
newOut, err := maybeRemuxTS(outPath)
if err != nil {
http.Error(w, "TS kann im Browser nicht abgespielt werden; Remux fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
writeErr(http.StatusInternalServerError, "TS Remux fehlgeschlagen: "+err.Error())
return
}
if strings.TrimSpace(newOut) == "" {
http.Error(w, "TS kann im Browser nicht abgespielt werden; Remux hat keine MP4 erzeugt", http.StatusInternalServerError)
writeErr(http.StatusInternalServerError, "TS kann im Browser nicht abgespielt werden; Remux hat keine MP4 erzeugt")
return
}
outPath = filepath.Clean(strings.TrimSpace(newOut))
// sicherstellen, dass wirklich eine MP4 existiert
fi, err := os.Stat(outPath)
if err != nil || fi.IsDir() || fi.Size() == 0 || strings.ToLower(filepath.Ext(outPath)) != ".mp4" {
http.Error(w, "Remux-Ergebnis ungültig", http.StatusInternalServerError)
writeErr(http.StatusInternalServerError, "Remux-Ergebnis ungültig")
return
}
}
@ -340,110 +530,94 @@ func recordVideo(w http.ResponseWriter, r *http.Request) {
case "ts":
newOut, err := maybeRemuxTS(outPath)
if err != nil {
http.Error(w, "Datei ist TS (nur .mp4 benannt); Remux fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
writeErr(http.StatusInternalServerError, "Datei ist TS (nur .mp4 benannt); Remux fehlgeschlagen: "+err.Error())
return
}
outPath = filepath.Clean(strings.TrimSpace(newOut))
case "html":
http.Error(w, "Server liefert HTML statt Video (Pfad/Lookup prüfen)", http.StatusInternalServerError)
writeErr(http.StatusInternalServerError, "Server liefert HTML statt Video (Pfad/Lookup prüfen)")
return
}
}
// 2) ✅ MP4 -> Quality Transcode (on-demand)
// ---- Quality / Transcode handling ----
w.Header().Set("Cache-Control", "no-store")
stream := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("stream")))
wantStream := stream == "1" || stream == "true" || stream == "yes"
if q != "" && wantStream {
prof, _ := profileFromQuality(q)
// ✅ Wenn quality gesetzt ist:
if res != "" {
prof, _ := profileFromResolution(res)
// ⚠️ Streaming-Transcode: startet Playback bevor fertig
if err := serveTranscodedStream(r.Context(), w, outPath, prof); err != nil {
http.Error(w, "transcode stream failed: "+err.Error(), http.StatusInternalServerError)
return
}
return
}
if q != "" {
var terr error
outPath, terr = maybeTranscodeForRequest(r.Context(), outPath, q)
if terr != nil {
http.Error(w, "transcode failed: "+terr.Error(), http.StatusInternalServerError)
return
}
}
// ✅ wenn Quelle schon <= Zielhöhe: ORIGINAL liefern
// ABER NUR wenn wir NICHT seeken und NICHT streamen wollen.
if prof.Height > 0 && startSec == 0 && !wantStream {
if err := ensureFFprobeAvailable(); err == nil {
pctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
defer cancel()
if srcH, err := getVideoHeightCached(pctx, outPath); err == nil && srcH > 0 {
if srcH <= prof.Height+8 {
serveVideoFile(w, r, outPath)
return
}
// ✅ ALT: Wiedergabe über Job-ID (funktioniert nur solange Job im RAM existiert)
id := strings.TrimSpace(r.URL.Query().Get("id"))
if id == "" {
http.Error(w, "id fehlt", http.StatusBadRequest)
return
}
jobsMu.Lock()
job, ok := jobs[id]
jobsMu.Unlock()
if !ok {
http.Error(w, "job nicht gefunden", http.StatusNotFound)
return
}
outPath := filepath.Clean(strings.TrimSpace(job.Output))
if outPath == "" {
http.Error(w, "output fehlt", http.StatusNotFound)
return
}
if !filepath.IsAbs(outPath) {
abs, err := resolvePathRelativeToApp(outPath)
if err != nil {
http.Error(w, "pfad auflösung fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return
}
outPath = abs
}
fi, err := os.Stat(outPath)
if err != nil || fi.IsDir() || fi.Size() == 0 {
http.Error(w, "datei nicht gefunden", http.StatusNotFound)
return
}
// 1) ✅ TS -> MP4 (on-demand remux)
if strings.ToLower(filepath.Ext(outPath)) == ".ts" {
newOut, err := maybeRemuxTS(outPath)
if err != nil {
http.Error(w, "TS Remux fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return
}
if strings.TrimSpace(newOut) == "" {
http.Error(w, "TS kann im Browser nicht abgespielt werden; Remux hat keine MP4 erzeugt", http.StatusInternalServerError)
return
}
outPath = filepath.Clean(strings.TrimSpace(newOut))
fi, err := os.Stat(outPath)
if err != nil || fi.IsDir() || fi.Size() == 0 || strings.ToLower(filepath.Ext(outPath)) != ".mp4" {
http.Error(w, "Remux-Ergebnis ungültig", http.StatusInternalServerError)
return
}
}
// 2) ✅ MP4 -> Quality Transcode (on-demand)
w.Header().Set("Cache-Control", "no-store")
if q != "" {
var terr error
outPath, terr = maybeTranscodeForRequest(r.Context(), outPath, q)
// ✅ 1) Seek (startSec>0): Standard = Segment-Datei transcodieren & dann normal ausliefern (Range-fähig)
// stream=1 kann weiterhin den "live pipe" erzwingen.
if startSec > 0 && !wantStream {
segPath, terr := maybeTranscodeForRequest(r.Context(), outPath, res, startSec)
if terr != nil {
http.Error(w, "transcode failed: "+terr.Error(), http.StatusInternalServerError)
writeErr(http.StatusInternalServerError, "transcode failed: "+terr.Error())
return
}
// ✅ Offset NUR setzen, wenn wir wirklich ab startSec ausliefern (Segment)
w.Header().Set("X-Transcode-Offset-Seconds", strconv.Itoa(startSec))
serveVideoFile(w, r, segPath)
return
}
// ✅ 2) stream=1 ODER startSec>0 mit stream=true: pipe-stream
if wantStream || startSec > 0 {
if startSec > 0 {
// ✅ Offset NUR setzen, wenn wir wirklich ab startSec ausliefern (Stream)
w.Header().Set("X-Transcode-Offset-Seconds", strconv.Itoa(startSec))
}
if err := serveTranscodedStreamAt(r.Context(), w, outPath, prof, startSec); err != nil {
if errors.Is(err, context.Canceled) {
return
}
writeErr(http.StatusInternalServerError, "transcode stream failed: "+err.Error())
return
}
return
}
// ✅ 3) startSec==0: Full-file Cache-Transcode (wie vorher)
if startSec == 0 {
segPath, terr := maybeTranscodeForRequest(r.Context(), outPath, res, 0)
if terr != nil {
writeErr(http.StatusInternalServerError, "transcode failed: "+terr.Error())
return
}
serveVideoFile(w, r, segPath)
return
}
}
// ✅ Full-file Cache-Transcode nur wenn startSec == 0
if res != "" && startSec == 0 {
var terr error
outPath, terr = maybeTranscodeForRequest(r.Context(), outPath, res, startSec)
if terr != nil {
writeErr(http.StatusInternalServerError, "transcode failed: "+terr.Error())
return
}
}
@ -464,18 +638,50 @@ func (fw flushWriter) Write(p []byte) (int, error) {
return n, err
}
func isClientDisconnectErr(err error) bool {
if err == nil {
return false
}
if errors.Is(err, context.Canceled) || errors.Is(err, net.ErrClosed) || errors.Is(err, io.ErrClosedPipe) {
return true
}
// Windows / net/http typische Fälle
var op *net.OpError
if errors.As(err, &op) {
// op.Err kann syscall.Errno(10054/10053/...) sein
if se, ok := op.Err.(syscall.Errno); ok {
switch int(se) {
case 10054, 10053, 10058: // WSAECONNRESET, WSAECONNABORTED, WSAESHUTDOWN
return true
}
}
}
msg := strings.ToLower(err.Error())
if strings.Contains(msg, "broken pipe") ||
strings.Contains(msg, "connection reset") ||
strings.Contains(msg, "forcibly closed") ||
strings.Contains(msg, "wsasend") ||
strings.Contains(msg, "wsarecv") {
return true
}
return false
}
// serveTranscodedStream streams a transcode of inPath beginning at the start
// of the file; it is shorthand for serveTranscodedStreamAt with offset zero.
func serveTranscodedStream(ctx context.Context, w http.ResponseWriter, inPath string, prof TranscodeProfile) error {
	const fromStart = 0
	return serveTranscodedStreamAt(ctx, w, inPath, prof, fromStart)
}
func serveTranscodedStreamAt(ctx context.Context, w http.ResponseWriter, inPath string, prof TranscodeProfile, startSec int) error {
if err := ensureFFmpegAvailable(); err != nil {
return err
}
// Header vor dem ersten Write setzen
w.Header().Set("Content-Type", "video/mp4")
w.Header().Set("Cache-Control", "no-store")
// Range macht bei Pipe-Streaming i.d.R. keinen Sinn:
w.Header().Set("Accept-Ranges", "none")
// ffmpeg args (mit -ss vor -i)
args := buildFFmpegStreamArgsAt(inPath, prof, startSec)
args := buildFFmpegStreamArgs(inPath, prof)
cmd := exec.CommandContext(ctx, "ffmpeg", args...)
stdout, err := cmd.StdoutPipe()
@ -483,34 +689,74 @@ func serveTranscodedStream(ctx context.Context, w http.ResponseWriter, inPath st
return err
}
var stderr bytes.Buffer
cmd.Stderr = &stderr
stderr, err := cmd.StderrPipe()
if err != nil {
return err
}
if err := cmd.Start(); err != nil {
return err
}
defer func() { _ = stdout.Close() }()
flusher, _ := w.(http.Flusher)
fw := flushWriter{w: w, f: flusher}
// stderr MUSS gelesen werden, sonst kann ffmpeg blockieren
go func() {
_, _ = io.ReadAll(stderr)
_ = cmd.Wait()
}()
buf := make([]byte, 64*1024)
_, copyErr := io.CopyBuffer(fw, stdout, buf)
w.Header().Set("Cache-Control", "no-store")
w.Header().Set("Content-Type", "video/mp4")
w.Header().Set("Accept-Ranges", "none")
w.WriteHeader(http.StatusOK)
waitErr := cmd.Wait()
// Wenn Client abbricht, ist ctx meist canceled -> nicht als "echter" Fehler behandeln
if ctx.Err() != nil {
return ctx.Err()
// kontinuierlich flushen
var out io.Writer = w
if f, ok := w.(http.Flusher); ok {
out = flushWriter{w: w, f: f}
}
_, copyErr := io.Copy(out, stdout)
// Client abgebrochen -> kein Fehler
if copyErr != nil {
return fmt.Errorf("stream copy failed: %w", copyErr)
}
if waitErr != nil {
return fmt.Errorf("ffmpeg failed: %w (stderr=%s)", waitErr, strings.TrimSpace(stderr.String()))
}
if isClientDisconnectErr(copyErr) {
return nil
}
}
// Wenn der Request context weg ist: ebenfalls ok (Quality-Wechsel, Seek, Tab zu)
if ctx.Err() != nil && errors.Is(ctx.Err(), context.Canceled) {
return nil
}
return copyErr
}
// buildFFmpegStreamArgsAt returns the ffmpeg pipe-stream arguments with an
// optional start offset. A positive startSec is injected as "-ss <sec>"
// directly before the first "-i", so ffmpeg performs a fast input-side seek.
func buildFFmpegStreamArgsAt(inPath string, prof TranscodeProfile, startSec int) []string {
	base := buildFFmpegStreamArgs(inPath, prof)
	if startSec <= 0 {
		return base
	}
	seek := []string{"-ss", strconv.Itoa(startSec)}
	for i, arg := range base {
		if arg == "-i" {
			// Splice the seek flags in front of the input specifier.
			spliced := make([]string, 0, len(base)+2)
			spliced = append(spliced, base[:i]...)
			spliced = append(spliced, seek...)
			spliced = append(spliced, base[i:]...)
			return spliced
		}
	}
	// No "-i" found (should not happen): prepend the seek flags as a fallback.
	return append(seek, base...)
}
func recordStatus(w http.ResponseWriter, r *http.Request) {
@ -1146,10 +1392,8 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) {
}
}
// 2) Fallback: RAM-Cache only (immer noch schnell, kein ffprobe)
if dur <= 0 {
dur = durationSecondsCacheOnly(full, fi)
}
// ✅ Kein Cache-only Fallback hier.
// Wenn meta fehlt, bleibt dur erstmal 0 und wird beim Ausliefern (Pagination) via ensureVideoMetaForFileBestEffort erzeugt.
ended := t
mk := modelFromFullPath(full)
@ -1357,8 +1601,43 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) {
// Response jobs bauen
out := make([]*RecordJob, 0, max(0, end-start))
for _, i := range idx[start:end] {
out = append(out, items[i].job)
for _, ii := range idx[start:end] {
base := items[ii].job
if base == nil {
continue
}
// ✅ Kopie erzeugen (wichtig: keine Race/Mutations am Cache-Objekt)
c := *base
// ✅ Meta immer aus meta.json (ggf. generieren, wenn fehlt)
// Kurzes Timeout pro Item, damit eine Seite nicht "hängen" kann.
pctx, cancel := context.WithTimeout(r.Context(), 3*time.Second)
m, ok := ensureVideoMetaForFileBestEffort(pctx, c.Output, c.SourceURL)
cancel()
// Wenn Meta ok: Felder IMMER daraus setzen
if ok && m != nil {
c.Meta = m
c.DurationSeconds = m.DurationSeconds
c.SizeBytes = m.FileSize
c.VideoWidth = m.VideoWidth
c.VideoHeight = m.VideoHeight
c.FPS = m.FPS
// SourceURL: wenn Job leer, aus Meta übernehmen
if strings.TrimSpace(c.SourceURL) == "" && strings.TrimSpace(m.SourceURL) != "" {
c.SourceURL = strings.TrimSpace(m.SourceURL)
}
} else {
// Falls wirklich gar keine Meta gebaut werden kann: wenigstens Size korrekt setzen
if fi, err := os.Stat(c.Output); err == nil && fi != nil && !fi.IsDir() && fi.Size() > 0 {
c.SizeBytes = fi.Size()
}
}
out = append(out, &c)
}
w.Header().Set("Content-Type", "application/json")
@ -1490,6 +1769,16 @@ func recordDeleteVideo(w http.ResponseWriter, r *http.Request) {
}
}
// ✅ NEU: auch Transcode-Cache zum endgültig gelöschten Video entfernen
if prevCanonical != "" {
removeTranscodesForID(doneAbs, prevCanonical)
// Best-effort (falls irgendwo doch mal abweichende IDs genutzt wurden)
if prevBase != "" && prevBase != prevCanonical {
removeTranscodesForID(doneAbs, stripHotPrefix(prevBase))
}
}
if err := os.MkdirAll(trashDir, 0o755); err != nil {
http.Error(w, "trash dir erstellen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
return

View File

@ -27,15 +27,15 @@ func setJobProgress(job *RecordJob, phase string, pct int) {
rangeFor := func(ph string) rng {
switch ph {
case "postwork":
return rng{70, 72}
return rng{0, 5}
case "remuxing":
return rng{72, 78}
return rng{5, 65}
case "moving":
return rng{78, 84}
return rng{65, 75}
case "probe":
return rng{84, 86}
return rng{75, 80}
case "assets":
return rng{86, 99}
return rng{80, 99}
default:
return rng{0, 100}
}
@ -58,6 +58,14 @@ func setJobProgress(job *RecordJob, phase string, pct int) {
job.Phase = phase
}
// ✅ Sonderfall: "wartet auf Nachbearbeitung" => Progress bleibt 0%
// Erwartung: Caller sendet phase="postwork" und pct=0 solange nur gewartet wird.
// Muss vor "niemals rückwärts" passieren, sonst käme man von Recording-Progress nicht mehr auf 0.
if phaseLower == "postwork" && pct == 0 {
job.Progress = 0
return
}
// Progress-Logik:
// - wenn wir in Postwork sind und jemand phasenlokale 0..100 liefert (z.B. remuxing 25),
// mappe das in den globalen Bereich der Phase.
@ -66,14 +74,20 @@ func setJobProgress(job *RecordJob, phase string, pct int) {
if inPostwork {
r := rangeFor(phaseLower)
if r.start > 0 && r.end >= r.start {
// Wenn pct kleiner ist als unser globaler Einstiegspunkt, interpretieren wir ihn als lokal (0..100)
// und mappen in [start..end].
if pct < r.start {
if r.end >= r.start {
// Heuristik:
// - Wenn pct bereits im globalen Bereich der Phase liegt => als global interpretieren, clampen.
// - Sonst => als lokales 0..100 interpretieren und in [start..end] mappen.
if pct >= r.start && pct <= r.end {
// schon global
mapped = pct
} else {
// lokal 0..100 -> global
width := float64(r.end - r.start)
mapped = r.start + int(math.Round((float64(pct)/100.0)*width))
} else {
// Wenn schon "global" geliefert wird, trotzdem in den Bereich begrenzen
}
// clamp in den Bereich
if mapped < r.start {
mapped = r.start
}
@ -82,7 +96,6 @@ func setJobProgress(job *RecordJob, phase string, pct int) {
}
}
}
}
// niemals rückwärts
if mapped < job.Progress {

View File

@ -107,7 +107,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
}
// ✅ Phase für Recording explizit setzen (damit spätere Progress-Writer das erkennen können)
setJobProgress(job, "recording", 1)
setJobProgress(job, "recording", 0)
notifyJobsChanged()
// ---- Aufnahme starten (Output-Pfad sauber relativ zur EXE auflösen) ----
@ -173,6 +173,10 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
err = errors.New("unsupported provider")
}
if err != nil && shouldLogRecordError(err, provider, req) {
fmt.Println("❌ [record]", provider, job.SourceURL, "->", err)
}
// ---- Recording fertig: EndedAt/Error setzen ----
end := time.Now()
@ -201,10 +205,12 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
// ✅ WICHTIG: sofort Phase wechseln, damit Recorder-Progress danach nichts mehr “zurücksetzt”
job.Phase = "postwork"
/*
// ✅ Progress darf ab jetzt nicht mehr runtergehen (mind. Einstieg in Postwork)
if job.Progress < 70 {
job.Progress = 70
}
*/
out := strings.TrimSpace(job.Output)
jobsMu.Unlock()
@ -256,7 +262,10 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
notifyJobsChanged()
notifyDoneChanged()
fmt.Println("🧹 auto-deleted (pre-queue):", base, "| size:", formatBytesSI(fi.Size()))
if shouldLogRecordInfo(req) {
fmt.Println("🧹 auto-deleted (pre-queue):", base, "(size: "+formatBytesSI(fi.Size())+")")
}
return
} else {
fmt.Println("⚠️ auto-delete (pre-queue) failed:", derr)
@ -305,7 +314,7 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) {
jobsMu.Unlock()
// optisches "queued" bumping
setJobProgress(job, "postwork", 71)
setJobProgress(job, "postwork", 0)
notifyJobsChanged()

View File

@ -1,3 +1,5 @@
// backend\record_stream_cb.go
package main
import (
@ -49,11 +51,6 @@ func RecordStream(
return fmt.Errorf("playlist abrufen: %w", err)
}
// ✅ Job erst jetzt sichtbar machen (Stream wirklich verfügbar)
if job != nil {
_ = publishJob(job.ID)
}
if job != nil && strings.TrimSpace(job.PreviewDir) == "" {
assetID := assetIDForJob(job)
if strings.TrimSpace(assetID) == "" {
@ -75,9 +72,6 @@ func RecordStream(
if err != nil {
return fmt.Errorf("datei erstellen: %w", err)
}
if job != nil {
_ = publishJob(job.ID)
}
defer func() {
_ = file.Close()
@ -88,6 +82,8 @@ func RecordStream(
var lastPush time.Time
var lastBytes int64
published := false
// 5) Segmente „watchen“ analog zu WatchSegments + HandleSegment im DVR
err = playlist.WatchSegments(ctx, hc, httpCookie, func(b []byte, duration float64) error {
// Hier wäre im DVR ch.HandleSegment bei dir einfach in eine Datei schreiben
@ -95,6 +91,12 @@ func RecordStream(
return fmt.Errorf("schreibe segment: %w", err)
}
// ✅ erst sichtbar machen, wenn wirklich Bytes geschrieben wurden
if job != nil && !published {
published = true
_ = publishJob(job.ID)
}
// ✅ live size (UI) throttled
written += int64(len(b))
if job != nil {

View File

@ -1,3 +1,5 @@
// backend\record_stream_mfc.go
package main
import (

View File

@ -14,34 +14,36 @@ import (
"time"
)
func serveVideoFile(w http.ResponseWriter, r *http.Request, path string) {
f, err := openForReadShareDelete(path)
func serveVideoFile(w http.ResponseWriter, r *http.Request, filePath string) {
f, err := os.Open(filePath)
if err != nil {
http.Error(w, "datei öffnen fehlgeschlagen: "+err.Error(), http.StatusInternalServerError)
http.Error(w, "open failed: "+err.Error(), http.StatusNotFound)
return
}
defer f.Close()
fi, err := f.Stat()
if err != nil || fi.IsDir() || fi.Size() == 0 {
http.Error(w, "datei nicht gefunden", http.StatusNotFound)
if err != nil || fi.IsDir() || fi.Size() <= 0 {
http.Error(w, "file not found", http.StatusNotFound)
return
}
w.Header().Set("Cache-Control", "no-store")
w.Header().Set("Accept-Ranges", "bytes")
w.Header().Set("X-Content-Type-Options", "nosniff")
ext := strings.ToLower(filepath.Ext(path))
ext := strings.ToLower(filepath.Ext(filePath))
switch ext {
case ".mp4":
w.Header().Set("Content-Type", "video/mp4")
case ".ts":
w.Header().Set("Content-Type", "video/mp2t")
default:
w.Header().Set("Content-Type", "video/mp4")
w.Header().Set("Content-Type", "application/octet-stream")
}
// ServeContent unterstützt Range Requests (wichtig für Video)
http.ServeContent(w, r, filepath.Base(path), fi.ModTime(), f)
// Range-Support (http.ServeContent macht 206/Content-Range automatisch, wenn Range kommt)
w.Header().Set("Accept-Ranges", "bytes")
w.Header().Set("Cache-Control", "no-store")
// ServeContent setzt Content-Length/Last-Modified/ETag-Handling korrekt
http.ServeContent(w, r, filepath.Base(filePath), fi.ModTime(), f)
}
func sniffVideoKind(path string) (string, error) {

325
backend/settings.go Normal file
View File

@ -0,0 +1,325 @@
// backend\settings.go
package main
import (
"encoding/json"
"fmt"
"net/http"
"os"
"path/filepath"
"strings"
"sync"
"github.com/sqweek/dialog"
)
// RecorderSettings is the persisted application configuration. It is stored
// as JSON on disk (see settingsFilePath) and served/updated over HTTP by
// recordSettingsHandler.
type RecorderSettings struct {
	RecordDir  string `json:"recordDir"`  // directory for in-progress recordings
	DoneDir    string `json:"doneDir"`    // directory finished recordings end up in; also the transcode-cache root
	FFmpegPath string `json:"ffmpegPath"` // optional explicit ffmpeg binary path ("" = auto-detect)

	AutoAddToDownloadList   bool `json:"autoAddToDownloadList"`
	AutoStartAddedDownloads bool `json:"autoStartAddedDownloads"`
	UseChaturbateAPI        bool `json:"useChaturbateApi"`
	UseMyFreeCamsWatcher    bool `json:"useMyFreeCamsWatcher"`

	// When active, finished downloads are deleted automatically if they are
	// smaller than the threshold below (in megabytes).
	AutoDeleteSmallDownloads        bool `json:"autoDeleteSmallDownloads"`
	AutoDeleteSmallDownloadsBelowMB int  `json:"autoDeleteSmallDownloadsBelowMB"`

	BlurPreviews   bool   `json:"blurPreviews"`
	TeaserPlayback string `json:"teaserPlayback"` // still | hover | all
	TeaserAudio    bool   `json:"teaserAudio"`    // play previews/teasers with audio

	EnableNotifications bool `json:"enableNotifications"`

	// EncryptedCookies contains base64(nonce+ciphertext) of a JSON cookie map.
	EncryptedCookies string `json:"encryptedCookies"`
}
var (
	// settingsMu guards every read and write of the settings value below.
	settingsMu sync.Mutex

	// settings holds the live configuration. These literals are the defaults
	// in effect until loadSettings overwrites them from disk.
	settings = RecorderSettings{
		RecordDir:                       "/records",
		DoneDir:                         "/records/done",
		FFmpegPath:                      "",
		AutoAddToDownloadList:           false,
		AutoStartAddedDownloads:         false,
		UseChaturbateAPI:                false,
		UseMyFreeCamsWatcher:            false,
		AutoDeleteSmallDownloads:        false,
		AutoDeleteSmallDownloadsBelowMB: 50,
		BlurPreviews:                    false,
		TeaserPlayback:                  "hover",
		TeaserAudio:                     false,
		EnableNotifications:             true,
		EncryptedCookies:                "",
	}

	// settingsFile is the default on-disk file name; settingsFilePath may
	// replace it via the RECORDER_SETTINGS_FILE environment variable.
	settingsFile = "recorder_settings.json"
)
// settingsFilePath returns the location of the settings JSON file. The file
// name can be overridden via the RECORDER_SETTINGS_FILE environment variable
// and is resolved relative to the executable/app directory when possible
// (falling back to the working directory under `go run`).
func settingsFilePath() string {
	fileName := strings.TrimSpace(os.Getenv("RECORDER_SETTINGS_FILE"))
	if fileName == "" {
		fileName = settingsFile
	}
	resolved, err := resolvePathRelativeToApp(fileName)
	if err == nil && strings.TrimSpace(resolved) != "" {
		return resolved
	}
	// Resolution failed: return the raw (possibly relative) name as-is.
	return fileName
}
// getSettings returns a snapshot copy of the current settings taken under the
// settings lock; callers may use the copy without further synchronization.
func getSettings() RecorderSettings {
	settingsMu.Lock()
	snapshot := settings
	settingsMu.Unlock()
	return snapshot
}
// loadSettings reads the settings JSON from disk (if present), merges it over
// the compiled-in defaults, normalizes/clamps the loaded values, ensures the
// record/done directories exist, and resolves the ffmpeg/ffprobe binaries.
// Intended to run once at startup; it mutates the package-level settings,
// ffmpegPath, and ffprobePath.
func loadSettings() {
	p := settingsFilePath()
	b, err := os.ReadFile(p)
	fmt.Println("🔧 settingsFile:", p)
	if err == nil {
		s := getSettings() // start from defaults so keys missing in the JSON keep their default values
		if json.Unmarshal(b, &s) == nil {
			// Normalize paths (trim + clean); leave empty values untouched.
			if strings.TrimSpace(s.RecordDir) != "" {
				s.RecordDir = filepath.Clean(strings.TrimSpace(s.RecordDir))
			}
			if strings.TrimSpace(s.DoneDir) != "" {
				s.DoneDir = filepath.Clean(strings.TrimSpace(s.DoneDir))
			}
			if strings.TrimSpace(s.FFmpegPath) != "" {
				s.FFmpegPath = strings.TrimSpace(s.FFmpegPath)
			}
			// TeaserPlayback: lowercase; anything invalid falls back to "hover".
			s.TeaserPlayback = strings.ToLower(strings.TrimSpace(s.TeaserPlayback))
			if s.TeaserPlayback == "" {
				s.TeaserPlayback = "hover"
			}
			if s.TeaserPlayback != "still" && s.TeaserPlayback != "hover" && s.TeaserPlayback != "all" {
				s.TeaserPlayback = "hover"
			}
			// Auto-delete threshold: clamp to [0, 100_000] MB.
			if s.AutoDeleteSmallDownloadsBelowMB < 0 {
				s.AutoDeleteSmallDownloadsBelowMB = 0
			}
			if s.AutoDeleteSmallDownloadsBelowMB > 100_000 {
				s.AutoDeleteSmallDownloadsBelowMB = 100_000
			}
			settingsMu.Lock()
			settings = s
			settingsMu.Unlock()
		}
	}
	// Ensure the configured directories exist (best effort; errors ignored).
	s := getSettings()
	recordAbs, _ := resolvePathRelativeToApp(s.RecordDir)
	doneAbs, _ := resolvePathRelativeToApp(s.DoneDir)
	if strings.TrimSpace(recordAbs) != "" {
		_ = os.MkdirAll(recordAbs, 0o755)
	}
	if strings.TrimSpace(doneAbs) != "" {
		_ = os.MkdirAll(doneAbs, 0o755)
	}
	// Determine the ffmpeg/ffprobe paths from settings/env/PATH.
	ffmpegPath = detectFFmpegPath()
	fmt.Println("🔍 ffmpegPath:", ffmpegPath)
	ffprobePath = detectFFprobePath()
	fmt.Println("🔍 ffprobePath:", ffprobePath)
}
// saveSettingsToDisk snapshots the current settings and writes them as
// indented JSON (with trailing newline) to the settings file via an atomic
// replace. Failures are logged and otherwise ignored (best effort).
func saveSettingsToDisk() {
	snapshot := getSettings()
	payload, err := json.MarshalIndent(snapshot, "", " ")
	if err != nil {
		fmt.Println("⚠️ settings marshal:", err)
		return
	}
	payload = append(payload, '\n')
	target := settingsFilePath()
	if writeErr := atomicWriteFile(target, payload); writeErr != nil {
		fmt.Println("⚠️ settings write:", writeErr)
	}
}
// recordSettingsHandler serves the settings API:
//
//	GET  -> returns the current RecorderSettings as JSON.
//	POST -> validates/normalizes incoming settings, ensures the directories
//	        exist, persists the settings, re-resolves ffmpeg/ffprobe, and
//	        echoes the stored settings back as JSON.
//
// Any other method answers 405.
func recordSettingsHandler(w http.ResponseWriter, r *http.Request) {
	switch r.Method {
	case http.MethodGet:
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("Cache-Control", "no-store")
		_ = json.NewEncoder(w).Encode(getSettings())
		return
	case http.MethodPost:
		var in RecorderSettings
		if err := json.NewDecoder(r.Body).Decode(&in); err != nil {
			http.Error(w, "invalid json: "+err.Error(), http.StatusBadRequest)
			return
		}
		// --- normalize (important: trim first, then empty-check, then clean) ---
		recRaw := strings.TrimSpace(in.RecordDir)
		doneRaw := strings.TrimSpace(in.DoneDir)
		if recRaw == "" || doneRaw == "" {
			http.Error(w, "recordDir und doneDir dürfen nicht leer sein", http.StatusBadRequest)
			return
		}
		in.RecordDir = filepath.Clean(recRaw)
		in.DoneDir = filepath.Clean(doneRaw)
		// Optional but strongly recommended: forbid "." as a directory.
		if in.RecordDir == "." || in.DoneDir == "." {
			http.Error(w, "recordDir/doneDir dürfen nicht '.' sein", http.StatusBadRequest)
			return
		}
		in.FFmpegPath = strings.TrimSpace(in.FFmpegPath)
		// TeaserPlayback: lowercase; anything invalid falls back to "hover".
		in.TeaserPlayback = strings.ToLower(strings.TrimSpace(in.TeaserPlayback))
		if in.TeaserPlayback == "" {
			in.TeaserPlayback = "hover"
		}
		if in.TeaserPlayback != "still" && in.TeaserPlayback != "hover" && in.TeaserPlayback != "all" {
			in.TeaserPlayback = "hover"
		}
		// Auto-delete threshold: clamp to [0, 100_000] MB.
		if in.AutoDeleteSmallDownloadsBelowMB < 0 {
			in.AutoDeleteSmallDownloadsBelowMB = 0
		}
		if in.AutoDeleteSmallDownloadsBelowMB > 100_000 {
			in.AutoDeleteSmallDownloadsBelowMB = 100_000
		}
		// --- ensure folders (report errors, e.g. missing permissions) ---
		recAbs, err := resolvePathRelativeToApp(in.RecordDir)
		if err != nil {
			http.Error(w, "ungültiger recordDir: "+err.Error(), http.StatusBadRequest)
			return
		}
		doneAbs, err := resolvePathRelativeToApp(in.DoneDir)
		if err != nil {
			http.Error(w, "ungültiger doneDir: "+err.Error(), http.StatusBadRequest)
			return
		}
		if err := os.MkdirAll(recAbs, 0o755); err != nil {
			http.Error(w, "konnte recordDir nicht erstellen: "+err.Error(), http.StatusBadRequest)
			return
		}
		if err := os.MkdirAll(doneAbs, 0o755); err != nil {
			http.Error(w, "konnte doneDir nicht erstellen: "+err.Error(), http.StatusBadRequest)
			return
		}
		// Update the in-memory settings under the lock...
		settingsMu.Lock()
		settings = in
		settingsMu.Unlock()
		// ...and persist them to disk.
		saveSettingsToDisk()
		// Re-resolve ffmpeg/ffprobe after the change. If the user set an
		// explicit FFmpegPath, use it directly; otherwise auto-detect.
		if strings.TrimSpace(in.FFmpegPath) != "" {
			ffmpegPath = in.FFmpegPath
		} else {
			ffmpegPath = detectFFmpegPath()
		}
		fmt.Println("🔍 ffmpegPath:", ffmpegPath)
		ffprobePath = detectFFprobePath()
		fmt.Println("🔍 ffprobePath:", ffprobePath)
		w.Header().Set("Content-Type", "application/json")
		w.Header().Set("Cache-Control", "no-store")
		_ = json.NewEncoder(w).Encode(getSettings())
		return
	default:
		http.Error(w, "Nur GET/POST erlaubt", http.StatusMethodNotAllowed)
		return
	}
}
// settingsBrowse opens a native picker for the requested ?target= value
// ("record"/"done" = directory picker, "ffmpeg" = file picker) and returns
// the chosen path as JSON. A cancelled dialog answers 204 No Content, which
// is convenient for the frontend.
func settingsBrowse(w http.ResponseWriter, r *http.Request) {
	var (
		chosen  string
		pickErr error
	)
	switch r.URL.Query().Get("target") {
	case "ffmpeg":
		// File selection for ffmpeg.exe.
		chosen, pickErr = dialog.File().
			Title("ffmpeg.exe auswählen").
			Load()
	case "record", "done":
		// Folder selection for record/done directories.
		chosen, pickErr = dialog.Directory().
			Title("Ordner auswählen").
			Browse()
	default:
		http.Error(w, "target muss record, done oder ffmpeg sein", http.StatusBadRequest)
		return
	}
	if pickErr != nil {
		// User cancelled -> 204 No Content.
		if strings.Contains(strings.ToLower(pickErr.Error()), "cancel") {
			w.WriteHeader(http.StatusNoContent)
			return
		}
		http.Error(w, "auswahl fehlgeschlagen: "+pickErr.Error(), http.StatusInternalServerError)
		return
	}
	// If the path lies inside the exe directory, report it as RELATIVE.
	chosen = maybeMakeRelativeToExe(chosen)
	w.Header().Set("Content-Type", "application/json")
	_ = json.NewEncoder(w).Encode(map[string]string{"path": chosen})
}
func maybeMakeRelativeToExe(abs string) string {
exe, err := os.Executable()
if err != nil {
return abs
}
base := filepath.Dir(exe)
rel, err := filepath.Rel(base, abs)
if err != nil {
return abs
}
// wenn rel mit ".." beginnt -> nicht innerhalb base -> absoluten Pfad behalten
if rel == "." || rel == ".." || strings.HasPrefix(rel, ".."+string(os.PathSeparator)) {
return abs
}
return filepath.ToSlash(rel) // frontend-freundlich
}

View File

@ -10,6 +10,7 @@ import (
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"sync"
@ -37,6 +38,66 @@ type heightCacheEntry struct {
var heightCacheMu sync.Mutex
var heightCache = map[string]heightCacheEntry{}
// durationCacheEntry memoizes one ffprobe duration result. mtime and size
// fingerprint the file so the cached value is discarded when the file changes.
type durationCacheEntry struct {
	mtime time.Time // file modification time at probe time
	size  int64     // file size in bytes at probe time
	dur   float64   // probed duration in seconds (> 0)
}

// durationCacheMu guards durationCache.
var durationCacheMu sync.Mutex

// durationCache maps an input path to its last probed duration.
var durationCache = map[string]durationCacheEntry{}
// probeVideoDurationSeconds asks ffprobe for the container duration of inPath
// and returns it in seconds. It errors when ffprobe fails or reports an
// empty, unparseable, or non-positive value.
func probeVideoDurationSeconds(ctx context.Context, inPath string) (float64, error) {
	// Equivalent command line:
	//   ffprobe -v error -show_entries format=duration -of csv=p=0 <file>
	out, err := exec.CommandContext(ctx, "ffprobe",
		"-v", "error",
		"-show_entries", "format=duration",
		"-of", "csv=p=0",
		inPath,
	).Output()
	if err != nil {
		return 0, err
	}
	raw := strings.TrimSpace(string(out))
	if raw == "" {
		return 0, fmt.Errorf("ffprobe returned empty duration")
	}
	dur, parseErr := strconv.ParseFloat(raw, 64)
	if parseErr != nil || dur <= 0 {
		return 0, fmt.Errorf("bad duration %q", raw)
	}
	return dur, nil
}
// getVideoDurationSecondsCached returns the duration of inPath in seconds,
// memoized per path. A cached value is reused only while the file's size and
// mtime match the entry; otherwise ffprobe is invoked and the cache updated.
func getVideoDurationSecondsCached(ctx context.Context, inPath string) (float64, error) {
	fi, err := os.Stat(inPath)
	if err != nil || fi.IsDir() || fi.Size() <= 0 {
		return 0, fmt.Errorf("input not usable")
	}
	// Fast path: cached entry still matches the file on disk.
	durationCacheMu.Lock()
	entry, hit := durationCache[inPath]
	durationCacheMu.Unlock()
	if hit && entry.size == fi.Size() && entry.mtime.Equal(fi.ModTime()) && entry.dur > 0 {
		return entry.dur, nil
	}
	// Miss or stale: probe and refresh the cache.
	dur, probeErr := probeVideoDurationSeconds(ctx, inPath)
	if probeErr != nil {
		return 0, probeErr
	}
	durationCacheMu.Lock()
	durationCache[inPath] = durationCacheEntry{mtime: fi.ModTime(), size: fi.Size(), dur: dur}
	durationCacheMu.Unlock()
	return dur, nil
}
func probeVideoHeight(ctx context.Context, inPath string) (int, error) {
// ffprobe -v error -select_streams v:0 -show_entries stream=height -of csv=p=0 <file>
cmd := exec.CommandContext(ctx, "ffprobe",
@ -93,27 +154,40 @@ type TranscodeProfile struct {
Height int
}
func profileFromQuality(q string) (TranscodeProfile, bool) {
switch strings.ToLower(strings.TrimSpace(q)) {
case "", "auto":
return TranscodeProfile{Name: "auto", Height: 0}, true
case "2160p":
return TranscodeProfile{Name: "2160p", Height: 2160}, true
case "1080p":
return TranscodeProfile{Name: "1080p", Height: 1080}, true
case "720p":
return TranscodeProfile{Name: "720p", Height: 720}, true
case "480p":
return TranscodeProfile{Name: "480p", Height: 480}, true
default:
// legacyHeightRe matches legacy "<height>p" resolution strings (e.g. "720p").
// Compiled once at package scope so request handling never recompiles it.
var legacyHeightRe = regexp.MustCompile(`^(\d{3,4})p$`)

// profileFromResolution maps a requested resolution string to a transcode
// profile. Accepted (case-insensitive): ""/ORIGINAL/SOURCE/AUTO (no scaling),
// LOW (480p), MEDIUM (720p), HIGH (1080p), plus legacy "<height>p" values in
// the 144..4320 range. The second return value is false for anything else.
func profileFromResolution(res string) (TranscodeProfile, bool) {
	// Stash-like: LOW | MEDIUM | HIGH | ORIGINAL (case-insensitive)
	s := strings.ToUpper(strings.TrimSpace(res))
	switch s {
	case "", "ORIGINAL", "SOURCE", "AUTO":
		return TranscodeProfile{Name: "ORIGINAL", Height: 0}, true
	case "LOW":
		return TranscodeProfile{Name: "LOW", Height: 480}, true
	case "MEDIUM":
		return TranscodeProfile{Name: "MEDIUM", Height: 720}, true
	case "HIGH":
		return TranscodeProfile{Name: "HIGH", Height: 1080}, true
	}

	// Backwards compatibility: "<height>p" (e.g. 720p).
	s2 := strings.ToLower(strings.TrimSpace(res))
	if m := legacyHeightRe.FindStringSubmatch(s2); m != nil {
		h, err := strconv.Atoi(m[1])
		if err != nil || h <= 0 {
			return TranscodeProfile{}, false
		}
		// Reject implausible heights (below 144p, above 8K).
		if h < 144 || h > 4320 {
			return TranscodeProfile{}, false
		}
		return TranscodeProfile{Name: fmt.Sprintf("%dp", h), Height: h}, true
	}

	return TranscodeProfile{}, false
}
// Cache layout: <doneAbs>/.transcodes/<canonicalID>/<quality>.mp4
func transcodeCachePath(doneAbs, canonicalID, quality string) string {
const v = "v1"
return filepath.Join(doneAbs, ".transcodes", canonicalID, v, quality+".mp4")
// Cache layout: <doneAbs>/.transcodes/<canonicalID>/<v>/<quality>/s<start>.mp4
func transcodeCachePath(doneAbs, canonicalID, quality string, startSec int) string {
const v = "v2"
return filepath.Join(doneAbs, ".transcodes", canonicalID, v, quality, fmt.Sprintf("s%d.mp4", startSec))
}
func ensureFFmpegAvailable() error {
@ -204,15 +278,21 @@ func runFFmpeg(ctx context.Context, args []string) error {
// Public entry used by recordVideo
// -------------------------
// maybeTranscodeForRequest inspects "quality" query param.
// maybeTranscodeForRequest inspects "resolution" query param.
// If quality is "auto" (or empty), it returns original outPath unchanged.
// Otherwise it ensures cached transcode exists & is fresh, and returns the cached path.
func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality string) (string, error) {
prof, ok := profileFromQuality(quality)
if !ok {
return "", fmt.Errorf("bad quality %q", quality)
func maybeTranscodeForRequest(rctx context.Context, originalPath string, resolution string, startSec int) (string, error) {
if startSec < 0 {
startSec = 0
}
if prof.Name == "auto" {
// optional: auf 2 Sekunden runter runden, passt zu GOP=60 (~2s bei 30fps)
startSec = (startSec / 2) * 2
prof, ok := profileFromResolution(resolution)
if !ok {
return "", fmt.Errorf("bad resolution %q", resolution)
}
if strings.EqualFold(prof.Name, "ORIGINAL") || prof.Height <= 0 {
return originalPath, nil
}
@ -221,22 +301,26 @@ func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality
return "", err
}
// optional: skip transcode if source is already <= requested height (prevents upscaling)
needScale := true
if prof.Height > 0 {
// ffprobe is needed only for this optimization
if err := ensureFFprobeAvailable(); err == nil {
// short timeout for probing
pctx, cancel := context.WithTimeout(rctx, 5*time.Second)
defer cancel()
if srcH, err := getVideoHeightCached(pctx, originalPath); err == nil && srcH > 0 {
// if source is already at/below requested (with tiny tolerance), don't transcode
// Quelle <= Ziel => kein Downscale nötig
if srcH <= prof.Height+8 {
needScale = false
// ✅ WICHTIG: wenn startSec==0, liefern wir wirklich Original (keine Cache-Datei bauen)
if startSec == 0 {
return originalPath, nil
}
}
}
}
}
// Need doneAbs for cache root
s := getSettings()
@ -254,7 +338,8 @@ func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality
return "", fmt.Errorf("canonical id empty")
}
cacheOut := transcodeCachePath(doneAbs, canonicalID, prof.Name)
qualityKey := strings.ToLower(strings.TrimSpace(prof.Name))
cacheOut := transcodeCachePath(doneAbs, canonicalID, qualityKey, startSec)
// fast path: already exists & fresh
if isCacheFresh(originalPath, cacheOut) {
@ -293,7 +378,13 @@ func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality
_ = os.Remove(tmp)
// ffmpeg args
args := buildFFmpegArgs(originalPath, tmp, prof)
var args []string
if needScale {
args = buildFFmpegArgs(originalPath, tmp, prof, startSec)
} else {
// ✅ nativer Seek: schneiden ohne re-encode
args = buildFFmpegCopySegmentArgs(originalPath, tmp, startSec)
}
if err := runFFmpeg(ctx, args); err != nil {
_ = os.Remove(tmp)
@ -335,18 +426,27 @@ func maybeTranscodeForRequest(rctx context.Context, originalPath string, quality
// ffmpeg profiles
// -------------------------
func buildFFmpegArgs(inPath, outPath string, prof TranscodeProfile) []string {
func buildFFmpegArgs(inPath, outPath string, prof TranscodeProfile, startSec int) []string {
// You can tune these defaults:
// - CRF: lower => better quality, bigger file (1080p ~22, 720p ~23, 480p ~24/25)
// - preset: veryfast is good for on-demand
crf := "23"
switch prof.Name {
case "1080p":
h := prof.Height
switch {
case h >= 2160:
crf = "20"
case h >= 1440:
crf = "21"
case h >= 1080:
crf = "22"
case "720p":
case h >= 720:
crf = "23"
case "480p":
case h >= 480:
crf = "25"
case h >= 360:
crf = "27"
default:
crf = "29"
}
// Keyframes: choose a stable value; if you want dynamic based on fps you can extend later.
@ -359,12 +459,27 @@ func buildFFmpegArgs(inPath, outPath string, prof TranscodeProfile) []string {
// scale keeps aspect ratio, ensures even width
vf := fmt.Sprintf("scale=-2:%d", prof.Height)
return []string{
// sanitize start
if startSec < 0 {
startSec = 0
}
// optional: align to small buckets to reduce cache fragmentation (and match GOP-ish seeking)
// startSec = (startSec / 2) * 2
args := []string{
"-hide_banner",
"-loglevel", "error",
"-nostdin",
"-y",
}
// ✅ Startposition: VOR "-i" => schnelles Seek zum nächsten Keyframe (gut für on-demand)
// (Wenn du frame-genau willst: "-ss" NACH "-i", ist aber deutlich langsamer.)
if startSec > 0 {
args = append(args, "-ss", strconv.Itoa(startSec))
}
args = append(args,
"-i", inPath,
// ✅ robust: falls Audio fehlt, trotzdem kein Fehler
@ -394,58 +509,105 @@ func buildFFmpegArgs(inPath, outPath string, prof TranscodeProfile) []string {
"-movflags", movflags,
outPath,
}
)
return args
}
func buildFFmpegStreamArgs(inPath string, prof TranscodeProfile) []string {
crf := "23"
switch prof.Name {
case "1080p":
crf = "22"
case "720p":
crf = "23"
case "480p":
crf = "25"
}
gop := "60"
vf := fmt.Sprintf("scale=-2:%d", prof.Height)
movflags := "frag_keyframe+empty_moov+default_base_moof"
return []string{
func buildFFmpegCopySegmentArgs(inPath, outPath string, startSec int) []string {
args := []string{
"-hide_banner",
"-loglevel", "error",
"-nostdin",
"-y",
"-i", inPath,
}
// ✅ robust (wie im File-Transcode)
if startSec > 0 {
args = append(args, "-ss", strconv.Itoa(startSec))
}
args = append(args,
"-i", inPath,
"-map", "0:v:0?",
"-map", "0:a:0?",
"-sn",
"-vf", vf,
// ✅ kein re-encode
"-c", "copy",
// ✅ fürs normale File: moov nach vorne
"-movflags", "+faststart",
outPath,
)
return args
}
func buildFFmpegStreamArgs(inPath string, prof TranscodeProfile) []string {
// Stash streamt MP4 als fragmented MP4 mit empty_moov
// (kein default_base_moof für "plain mp4 stream").
movflags := "frag_keyframe+empty_moov"
// Stash-ähnliche CRF-Werte
crf := "25"
switch strings.ToUpper(strings.TrimSpace(prof.Name)) {
case "HIGH", "1080P":
crf = "23"
case "MEDIUM", "720P":
crf = "25"
case "LOW", "480P":
crf = "27"
}
args := []string{
"-hide_banner",
"-loglevel", "error",
"-nostdin",
// "-y" ist bei pipe egal, kann aber bleiben ich lasse es weg wie im Beispiel
}
// Input
args = append(args, "-i", inPath)
// robust: Video/Audio optional
args = append(args,
"-map", "0:v:0?",
"-map", "0:a:0?",
"-sn",
)
// Scale nur wenn wir wirklich runterskalieren wollen
if prof.Height > 0 {
vf := fmt.Sprintf("scale=-2:%d", prof.Height)
args = append(args, "-vf", vf)
}
// Video
args = append(args,
"-c:v", "libx264",
"-preset", "veryfast",
"-crf", crf,
"-pix_fmt", "yuv420p",
"-max_muxing_queue_size", "1024",
"-g", gop,
"-keyint_min", gop,
"-sc_threshold", "0",
"-max_muxing_queue_size", "1024",
)
// Audio (nur wenn vorhanden wegen map 0:a:0?)
args = append(args,
"-c:a", "aac",
"-b:a", "128k",
"-ac", "2",
)
// MP4 stream flags
args = append(args,
"-movflags", movflags,
"-f", "mp4",
"pipe:1",
}
"pipe:", // wichtig: wie im Beispiel
)
return args
}
// -------------------------

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -5,8 +5,8 @@
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" />
<title>App</title>
<script type="module" crossorigin src="/assets/index-DV6ZfOPf.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-BRCxVTHL.css">
<script type="module" crossorigin src="/assets/index-DlgYo3oN.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-SqYhLYXQ.css">
</head>
<body>
<div id="root"></div>

View File

@ -156,6 +156,75 @@ function getProviderFromNormalizedUrl(normUrl: string): Provider | null {
}
}
function chaturbateUserFromUrl(normUrl: string): string {
try {
const u = new URL(normUrl)
const host = u.hostname.replace(/^www\./i, '').toLowerCase()
if (host !== 'chaturbate.com' && !host.endsWith('.chaturbate.com')) return ''
// https://chaturbate.com/<name>/...
const parts = u.pathname.split('/').filter(Boolean)
return parts[0] ? decodeURIComponent(parts[0]).trim() : ''
} catch {
return ''
}
}
/**
* Macht aus "beliebigen" Provider-URLs eine EINDEUTIGE Standardform.
* -> wichtig für dedupe (Queue, alreadyRunning), Clipboard, Pending-Maps.
*/
function canonicalizeProviderUrl(normUrl: string): string {
const provider = getProviderFromNormalizedUrl(normUrl)
if (!provider) return normUrl
if (provider === 'chaturbate') {
const name = chaturbateUserFromUrl(normUrl)
return name ? `https://chaturbate.com/${encodeURIComponent(name)}/` : normUrl
}
// provider === 'mfc'
const name = mfcUserFromUrl(normUrl)
// Standardisiere auf EIN Format (hier: #<name>)
return name ? `https://www.myfreecams.com/#${encodeURIComponent(name)}` : normUrl
}
/** Gibt den "ModelKey" aus einer URL zurück (lowercased) für beide Provider */
function providerKeyLowerFromUrl(normUrl: string): string {
const provider = getProviderFromNormalizedUrl(normUrl)
if (!provider) return ''
const raw = provider === 'chaturbate' ? chaturbateUserFromUrl(normUrl) : mfcUserFromUrl(normUrl)
return (raw || '').trim().toLowerCase()
}
function mfcUserFromUrl(normUrl: string): string {
try {
const u = new URL(normUrl)
const host = u.hostname.replace(/^www\./i, '').toLowerCase()
// nur MFC
if (host !== 'myfreecams.com' && !host.endsWith('.myfreecams.com')) return ''
// typische MFC Profile-URLs:
// https://www.myfreecams.com/#<name>
// https://www.myfreecams.com/#/models/<name>
// https://www.myfreecams.com/<name> (seltener)
const hash = (u.hash || '').replace(/^#\/?/, '') // "#/models/foo" -> "models/foo"
if (hash) {
const parts = hash.split('/').filter(Boolean)
const last = parts[parts.length - 1] || ''
if (last) return decodeURIComponent(last).trim()
}
const parts = u.pathname.split('/').filter(Boolean)
const last = parts[parts.length - 1] || ''
return last ? decodeURIComponent(last).trim() : ''
} catch {
return ''
}
}
const baseName = (p: string) => (p || '').replaceAll('\\', '/').split('/').pop() || ''
function replaceBasename(fullPath: string, newBase: string) {
@ -232,6 +301,8 @@ export default function App() {
setCbOnlineByKeyLower({})
cbOnlineByKeyLowerRef.current = {}
startedToastByJobIdRef.current = {}
jobsInitDoneRef.current = false
setPendingWatchedRooms([])
setPendingAutoStartByKey({})
@ -248,6 +319,50 @@ export default function App() {
const notify = useNotify()
const notifyRef = useRef(notify)
// ✅ Dedupe für "Cookies fehlen" Meldung (damit silent/autostarts nicht spammen)
const cookieProblemLastAtRef = useRef(0)
const isCookieGateError = (msg: string) => {
const m = (msg || '').toLowerCase()
return (
m.includes('altersverifikationsseite erhalten') ||
m.includes('verify your age') ||
m.includes('schutzseite von cloudflare erhalten') ||
m.includes('just a moment') ||
m.includes('kein room-html')
)
}
const showMissingCookiesMessage = (opts?: { silent?: boolean }) => {
const silent = Boolean(opts?.silent)
const title = 'Cookies fehlen oder sind abgelaufen'
const body =
'Der Recorder hat statt des Room-HTML eine Schutz-/Altersverifikationsseite erhalten. ' +
'Bitte Cookies aktualisieren (bei Chaturbate z.B. cf_clearance + sessionId) und erneut starten.'
// Wenn Nutzer aktiv klickt: oben als Error-Box zeigen + Cookie-Modal anbieten
if (!silent) {
setError(`⚠️ ${title}. ${body}`)
// optional aber hilfreich: Modal direkt öffnen
setCookieModalOpen(true)
return
}
// Bei silent (Auto-Start / Queue): nur selten Toast
const now = Date.now()
if (now - cookieProblemLastAtRef.current > 15_000) {
cookieProblemLastAtRef.current = now
notifyRef.current?.error(title, body)
}
}
useEffect(() => {
notifyRef.current = notify
}, [notify])
// ✅ Perf: PerformanceMonitor erst nach initialer Render/Hydration anzeigen
const [showPerfMon, setShowPerfMon] = useState(false)
@ -366,14 +481,38 @@ export default function App() {
useEffect(() => {
const onOpen = (ev: Event) => {
const e = ev as CustomEvent<{ modelKey?: string }>
const raw = (e.detail?.modelKey ?? '').trim()
const raw0 = (e.detail?.modelKey ?? '').trim()
if (!raw0) return
let k = raw.replace(/^https?:\/\//i, '')
// 1) Wenn es "nur ein Key" ist (z.B. maypeach), direkt übernehmen
// Heuristik: keine Spaces, keine Slashes -> sehr wahrscheinlich Key
const looksLikeKey =
!raw0.includes(' ') &&
!raw0.includes('/') &&
!raw0.includes('\\')
if (looksLikeKey) {
const k = raw0.replace(/^@/, '').trim().toLowerCase()
if (k) setDetailsModelKey(k)
return
}
// 2) Sonst: URL/Path normalisieren + Provider-Key extrahieren
const norm0 = normalizeHttpUrl(raw0)
if (!norm0) {
// Fallback auf alte Key-Logik (falls raw sowas wie "chaturbate.com/im_jasmine" ist)
let k = raw0.replace(/^https?:\/\//i, '')
if (k.includes('/')) k = k.split('/').filter(Boolean).pop() || k
if (k.includes(':')) k = k.split(':').pop() || k
k = k.trim().toLowerCase()
if (k) setDetailsModelKey(k)
return
}
const norm = canonicalizeProviderUrl(norm0)
const keyLower = providerKeyLowerFromUrl(norm)
if (keyLower) setDetailsModelKey(keyLower)
}
window.addEventListener('open-model-details', onOpen as any)
@ -479,6 +618,11 @@ export default function App() {
const busyRef = useRef(false)
const cookiesRef = useRef<Record<string, string>>({})
const jobsRef = useRef<RecordJob[]>([])
// ✅ "Job gestartet" Toast: dedupe (auch gegen SSE/polling) + initial-load suppression
const startedToastByJobIdRef = useRef<Record<string, true>>({})
const jobsInitDoneRef = useRef(false)
useEffect(() => {
busyRef.current = busy
}, [busy])
@ -493,9 +637,163 @@ export default function App() {
const pendingStartUrlRef = useRef<string | null>(null)
const lastClipboardUrlRef = useRef<string>('')
// --- START QUEUE (parallel) ---
const START_CONCURRENCY = 4 // ⬅️ kannst du höher setzen, aber 4 ist ein guter Start
type StartQueueItem = {
url: string
silent: boolean
pendingKeyLower?: string // wenn aus pendingAutoStartByKey kommt
}
const startQueueRef = useRef<StartQueueItem[]>([])
const startInFlightRef = useRef(0)
const startQueuedSetRef = useRef<Set<string>>(new Set()) // dedupe: verhindert Duplikate
const pumpStartQueueScheduledRef = useRef(false)
const setBusyFromStarts = useCallback(() => {
const v = startInFlightRef.current > 0
setBusy(v)
busyRef.current = v
}, [])
const enqueueStart = useCallback(
(item: StartQueueItem) => {
const norm0 = normalizeHttpUrl(item.url)
if (!norm0) return false
const norm = canonicalizeProviderUrl(norm0)
// dedupe: gleiche URL nicht 100x in die Queue
if (startQueuedSetRef.current.has(norm)) return true
startQueuedSetRef.current.add(norm)
startQueueRef.current.push({ ...item, url: norm })
// pump einmal pro Tick schedulen
if (!pumpStartQueueScheduledRef.current) {
pumpStartQueueScheduledRef.current = true
queueMicrotask(() => {
pumpStartQueueScheduledRef.current = false
void pumpStartQueue()
})
}
return true
},
// pumpStartQueue kommt gleich darunter (useCallback), daher eslint ggf. meckert -> ok, wir definieren pumpStartQueue als function declaration unten
[]
)
async function doStartNow(normUrl: string, silent: boolean): Promise<boolean> {
normUrl = canonicalizeProviderUrl(normUrl)
// ✅ Duplicate-running guard (wie vorher)
const alreadyRunning = jobsRef.current.some((j) => {
if (String(j.status || '').toLowerCase() !== 'running') return false
if ((j as any).endedAt) return false
const jNorm0 = normalizeHttpUrl(String((j as any).sourceUrl || ''))
const jNorm = jNorm0 ? canonicalizeProviderUrl(jNorm0) : ''
return jNorm === normUrl
})
if (alreadyRunning) return true
try {
const currentCookies = cookiesRef.current
const provider = getProviderFromNormalizedUrl(normUrl)
if (!provider) {
if (!silent) setError('Nur chaturbate.com oder myfreecams.com werden unterstützt.')
return false
}
if (provider === 'chaturbate' && !hasRequiredChaturbateCookies(currentCookies)) {
if (!silent) setError('Für Chaturbate müssen die Cookies "cf_clearance" und "sessionId" gesetzt sein.')
return false
}
const cookieString = Object.entries(currentCookies)
.map(([k, v]) => `${k}=${v}`)
.join('; ')
const created = await apiJSON<RecordJob>('/api/record', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ url: normUrl, cookie: cookieString }),
})
if (created?.id) startedToastByJobIdRef.current[String(created.id)] = true
// UI sofort aktualisieren (optional)
setJobs((prev) => [created, ...prev])
jobsRef.current = [created, ...jobsRef.current]
return true
} catch (e: any) {
const msg = e?.message ?? String(e)
// ✅ Spezialfall: Age-Gate / Cloudflare / kein Room-HTML => Cookies Hinweis
if (isCookieGateError(msg)) {
showMissingCookiesMessage({ silent })
return false
}
if (!silent) setError(msg)
return false
}
}
async function pumpStartQueue(): Promise<void> {
// so viele wie möglich parallel starten
while (startInFlightRef.current < START_CONCURRENCY && startQueueRef.current.length > 0) {
const next = startQueueRef.current.shift()!
startInFlightRef.current++
setBusyFromStarts()
void (async () => {
try {
const ok = await doStartNow(next.url, next.silent)
// wenn das aus pendingAutoStartByKey kam: nur bei Erfolg dort löschen
if (ok && next.pendingKeyLower) {
const kLower = next.pendingKeyLower
setPendingAutoStartByKey((prev) => {
const copy = { ...(prev || {}) }
delete copy[kLower]
pendingAutoStartByKeyRef.current = copy
return copy
})
}
} finally {
// dedupe wieder freigeben
startQueuedSetRef.current.delete(next.url)
startInFlightRef.current = Math.max(0, startInFlightRef.current - 1)
setBusyFromStarts()
// falls noch was da ist: weiterpumpen
if (startQueueRef.current.length > 0) {
void pumpStartQueue()
}
}
})()
}
}
// ✅ Zentraler Snapshot: username(lower) -> room
const [cbOnlineByKeyLower, setCbOnlineByKeyLower] = useState<Record<string, ChaturbateOnlineRoom>>({})
const cbOnlineByKeyLowerRef = useRef<Record<string, ChaturbateOnlineRoom>>({})
const lastCbShowByKeyLowerRef = useRef<Record<string, string>>({})
// ✅ merkt sich, ob ein Model im letzten Snapshot überhaupt online war
const lastCbOnlineByKeyLowerRef = useRef<Record<string, true>>({})
// ✅ verhindert Toast-Spam direkt beim ersten Poll (Startup)
const cbOnlineInitDoneRef = useRef(false)
// ✅ merkt sich, ob ein Model seit App-Start schon einmal online war
const everCbOnlineByKeyLowerRef = useRef<Record<string, true>>({})
useEffect(() => {
cbOnlineByKeyLowerRef.current = cbOnlineByKeyLower
}, [cbOnlineByKeyLower])
@ -539,8 +837,9 @@ export default function App() {
// ✅ StartURL (hier habe ich den alten Online-Fetch entfernt und nur Snapshot genutzt)
const startUrl = useCallback(async (rawUrl: string, opts?: { silent?: boolean }): Promise<boolean> => {
const norm = normalizeHttpUrl(rawUrl)
if (!norm) return false
const norm0 = normalizeHttpUrl(rawUrl)
if (!norm0) return false
const norm = canonicalizeProviderUrl(norm0)
const silent = Boolean(opts?.silent)
if (!silent) setError(null)
@ -565,7 +864,8 @@ export default function App() {
// ✅ Wenn endedAt existiert: Aufnahme ist fertig -> Postwork/Queue -> NICHT blocken
if ((j as any).endedAt) return false
const jNorm = normalizeHttpUrl(String((j as any).sourceUrl || ''))
const jNorm0 = normalizeHttpUrl(String((j as any).sourceUrl || ''))
const jNorm = jNorm0 ? canonicalizeProviderUrl(jNorm0) : ''
return jNorm === norm
})
if (alreadyRunning) return true
@ -621,11 +921,23 @@ export default function App() {
body: JSON.stringify({ url: norm, cookie: cookieString }),
})
// ✅ verhindert Doppel-Toast: StartUrl toastet ggf. schon selbst,
// und kurz danach kommt der Job nochmal über SSE/polling rein.
if (created?.id) startedToastByJobIdRef.current[String(created.id)] = true
setJobs((prev) => [created, ...prev])
jobsRef.current = [created, ...jobsRef.current]
return true
} catch (e: any) {
if (!silent) setError(e?.message ?? String(e))
const msg = e?.message ?? String(e)
// ✅ Spezialfall: Age-Gate / Cloudflare / kein Room-HTML => Cookies Hinweis
if (isCookieGateError(msg)) {
showMissingCookiesMessage({ silent })
return false
}
if (!silent) setError(msg)
return false
} finally {
setBusy(false)
@ -849,50 +1161,122 @@ export default function App() {
if (donePage > maxPage) setDonePage(maxPage)
}, [doneCount, donePage])
// jobs SSE / polling (unverändert)
// jobs SSE / polling (mit "Job gestartet" Toast für Backend-Autostarts)
useEffect(() => {
if (!authed) return // ✅ WICHTIG: bei Logout alles stoppen
let cancelled = false
let es: EventSource | null = null
let fallbackTimer: number | null = null
let inFlight = false
const stopFallbackPolling = () => {
if (fallbackTimer) {
window.clearInterval(fallbackTimer)
fallbackTimer = null
}
}
const applyList = (list: any) => {
const arr = Array.isArray(list) ? (list as RecordJob[]) : []
if (cancelled) return
// --- vorheriger Snapshot für Status-Transitions ---
const prev = jobsRef.current
const prevById = new Map(prev.map((j) => [j.id, j.status]))
const prevStatusById = new Map<string, string>()
for (const j of Array.isArray(prev) ? prev : []) {
const id = String((j as any)?.id ?? '')
if (!id) continue
prevStatusById.set(id, String((j as any)?.status ?? ''))
}
// ✅ 0) Initial load: KEINE Toasts, aber als "gesehen" markieren (falls du später wieder Start-Toast einführen willst)
if (!jobsInitDoneRef.current) {
const seen: Record<string, true> = {}
for (const j of arr) {
const id = String((j as any)?.id ?? '')
if (id) seen[id] = true
}
startedToastByJobIdRef.current = seen
jobsInitDoneRef.current = true
}
// ✅ Finished/Stopped/Failed Transition zählen -> Count-Hint + Asset-Bump
const terminal = new Set(['finished', 'stopped', 'failed'])
let endedDelta = 0
for (const j of arr) {
const ps = prevById.get(j.id)
if (!ps || ps === j.status) continue
const id = String((j as any)?.id ?? '')
if (!id) continue
const before = String(prevStatusById.get(id) ?? '').toLowerCase().trim()
const now = String((j as any)?.status ?? '').toLowerCase().trim()
if (!before || before === now) continue
// nur zählen, wenn wir "neu" in einen terminal state gehen
if (terminal.has(j.status) && !terminal.has(ps)) {
endedDelta++
}
if (terminal.has(now) && !terminal.has(before)) endedDelta++
}
if (endedDelta > 0) {
// ✅ Tabs/Count sofort aktualisieren auch wenn Finished-Tab nicht offen ist
window.dispatchEvent(
new CustomEvent('finished-downloads:count-hint', { detail: { delta: endedDelta } })
)
// deine bestehenden Asset-Bumps (thumbnails etc.)
bumpAssetsTwice()
}
setJobs(arr)
jobsRef.current = arr
// ---- Queue-Info berechnen (Postwork-Warteschlange) ----
const statusLower = (j: any) => String(j?.status ?? '').toLowerCase().trim()
const isPostworkQueued = (j: any) => {
const s = statusLower(j)
return s === 'postwork' || s === 'queued_postwork' || s === 'waiting_postwork'
}
const ts = (j: any) =>
Number(
j?.endedAtMs ??
j?.endedAt ??
j?.createdAtMs ??
j?.createdAt ??
j?.startedAtMs ??
j?.startedAt ??
0
) || 0
const postworkQueue = arr
.filter(isPostworkQueued)
.slice()
.sort((a, b) => ts(a) - ts(b))
const postworkTotal = postworkQueue.length
const postworkPosById = new Map<string, number>()
for (let i = 0; i < postworkQueue.length; i++) {
const id = String((postworkQueue[i] as any)?.id ?? '')
if (id) postworkPosById.set(id, i + 1)
}
const arrWithQueue = arr.map((j: any) => {
const id = String(j?.id ?? '')
const pos = id ? postworkPosById.get(id) : undefined
if (!pos) return j
return {
...j,
postworkQueuePos: pos,
postworkQueueTotal: postworkTotal,
}
})
setJobs(arrWithQueue)
jobsRef.current = arrWithQueue
setPlayerJob((prevJob) => {
if (!prevJob) return prevJob
const updated = arr.find((j) => j.id === prevJob.id)
const updated = arrWithQueue.find((j) => j.id === prevJob.id)
if (updated) return updated
// wenn running und nicht mehr in list: player schließen, sonst stehen lassen
return prevJob.status === 'running' ? null : prevJob
})
}
@ -919,7 +1303,13 @@ export default function App() {
es = new EventSource('/api/record/stream')
// ✅ wenn SSE wieder verbunden ist: Fallback-Polling stoppen
es.onopen = () => {
stopFallbackPolling()
}
const onJobs = (ev: MessageEvent) => {
stopFallbackPolling() // ✅ sobald Daten kommen, Polling aus
try {
applyList(JSON.parse(ev.data))
} catch {}
@ -932,18 +1322,20 @@ export default function App() {
if (!document.hidden) void loadOnce()
}
document.addEventListener('visibilitychange', onVis)
window.addEventListener('hover', onVis)
// ❌ das hier empfehle ich rauszuwerfen, siehe Schritt C
// window.addEventListener('hover', onVis)
return () => {
cancelled = true
if (fallbackTimer) window.clearInterval(fallbackTimer)
stopFallbackPolling()
document.removeEventListener('visibilitychange', onVis)
window.removeEventListener('hover', onVis)
// window.removeEventListener('hover', onVis)
es?.removeEventListener('jobs', onJobs as any)
es?.close()
es = null
}
}, [bumpAssetsTwice])
}, [authed])
useEffect(() => {
if (selectedTab !== 'finished') return
@ -1177,10 +1569,13 @@ export default function App() {
const handleAddToDownloads = useCallback(
async (job: RecordJob): Promise<boolean> => {
const raw = String((job as any)?.sourceUrl ?? '')
const url = extractFirstUrl(raw)
if (!url) return false
const url0 = extractFirstUrl(raw)
if (!url0) return false
// silent=true -> keine rote Error-Box, wir geben Feedback über Checkmark/Toast
const norm0 = normalizeHttpUrl(url0)
if (!norm0) return false
const url = canonicalizeProviderUrl(norm0)
const ok = await startUrl(url, { silent: true })
if (!ok) {
@ -1921,21 +2316,25 @@ export default function App() {
inFlight = true
try {
const text = await navigator.clipboard.readText()
const url = extractFirstUrl(text)
if (!url) return
if (!getProviderFromNormalizedUrl(url)) return
const url0 = extractFirstUrl(text)
if (!url0) return
const norm0 = normalizeHttpUrl(url0)
if (!norm0) return
const provider = getProviderFromNormalizedUrl(norm0)
if (!provider) return
const url = canonicalizeProviderUrl(norm0)
if (url === lastClipboardUrlRef.current) return
lastClipboardUrlRef.current = url
if (autoAddEnabled) setSourceUrl(url)
if (autoStartEnabled) {
if (busyRef.current) {
pendingStartUrlRef.current = url
} else {
pendingStartUrlRef.current = null
await startUrl(url)
}
// ✅ immer enqueue (dedupe verhindert doppelt)
enqueueStart({ url, silent: false })
}
} catch {
// ignore
@ -1966,15 +2365,6 @@ export default function App() {
}
}, [autoAddEnabled, autoStartEnabled, startUrl])
useEffect(() => {
if (busy) return
if (!autoStartEnabled) return
const pending = pendingStartUrlRef.current
if (!pending) return
pendingStartUrlRef.current = null
void startUrl(pending)
}, [busy, autoStartEnabled, startUrl])
useEffect(() => {
const stop = startChaturbateOnlinePolling({
getModels: () => {
@ -2006,7 +2396,11 @@ export default function App() {
if (!data?.enabled) {
setCbOnlineByKeyLower({})
cbOnlineByKeyLowerRef.current = {}
lastCbShowByKeyLowerRef.current = {}
setPendingWatchedRooms([])
everCbOnlineByKeyLowerRef.current = {}
cbOnlineInitDoneRef.current = false
lastCbOnlineByKeyLowerRef.current = {}
setLastHeaderUpdateAtMs(Date.now())
return
}
@ -2020,6 +2414,97 @@ export default function App() {
setCbOnlineByKeyLower(nextSnap)
cbOnlineByKeyLowerRef.current = nextSnap
// ✅ Toasts: (A) watched offline->online, (B) waiting->public, (C) online->offline->online => "wieder online"
try {
const notificationsOn = Boolean((recSettingsRef.current as any).enableNotifications ?? true)
const waiting = new Set(['private', 'away', 'hidden'])
// watched-Keys (nur Chaturbate)
const watchedSetLower = new Set(
Object.values(modelsByKeyRef.current || {})
.filter((m) => Boolean(m?.watching) && String(m?.host ?? '').toLowerCase().includes('chaturbate'))
.map((m) => String(m?.modelKey ?? '').trim().toLowerCase())
.filter(Boolean)
)
const prevShow = lastCbShowByKeyLowerRef.current || {}
const nextShowMap: Record<string, string> = { ...prevShow }
const prevOnline = lastCbOnlineByKeyLowerRef.current || {}
const isInitial = !cbOnlineInitDoneRef.current
// ✅ "war schon mal online" Snapshot (vor diesem Poll)
const everOnline = everCbOnlineByKeyLowerRef.current || {}
const nextEverOnline: Record<string, true> = { ...everOnline }
for (const [keyLower, room] of Object.entries(nextSnap)) {
const nowShow = String((room as any)?.current_show ?? '').toLowerCase().trim()
const beforeShow = String(prevShow[keyLower] ?? '').toLowerCase().trim()
const wasOnline = Boolean(prevOnline[keyLower])
const isOnline = true // weil es in nextSnap ist
const becameOnline = isOnline && !wasOnline
// ✅ war irgendwann schon mal online (vor diesem Poll)?
const hadEverBeenOnline = Boolean(everOnline[keyLower])
const name = String((room as any)?.username ?? keyLower).trim() || keyLower
const imageUrl = String((room as any)?.image_url ?? '').trim()
// immer merken: jetzt ist es online
nextEverOnline[keyLower] = true
// (B) waiting -> public => "wieder online" (höchste Priorität, damit kein Doppel-Toast)
const becamePublicFromWaiting = nowShow === 'public' && waiting.has(beforeShow)
if (becamePublicFromWaiting) {
if (notificationsOn) {
notify.info(name, 'ist wieder online.', {
imageUrl,
imageAlt: `${name} Vorschau`,
durationMs: 5500,
})
}
if (nowShow) nextShowMap[keyLower] = nowShow
continue
}
// (A/C) watched: offline -> online
if (watchedSetLower.has(keyLower) && becameOnline) {
// C: online->offline->online => "wieder online"
const cameBackFromOffline = hadEverBeenOnline
// Startup-Spam vermeiden
if (notificationsOn && !isInitial) {
notify.info(
name,
cameBackFromOffline ? 'ist wieder online.' : 'ist online.',
{
imageUrl,
imageAlt: `${name} Vorschau`,
durationMs: 5500,
}
)
}
}
if (nowShow) nextShowMap[keyLower] = nowShow
}
// Presence-Snapshot merken
const nextOnline: Record<string, true> = {}
for (const k of Object.keys(nextSnap)) nextOnline[k] = true
lastCbOnlineByKeyLowerRef.current = nextOnline
// ✅ "ever online" merken
everCbOnlineByKeyLowerRef.current = nextEverOnline
cbOnlineInitDoneRef.current = true
lastCbShowByKeyLowerRef.current = nextShowMap
} catch {
// ignore
}
// Online-Keys für Store
const storeKeys = chaturbateStoreKeysLowerRef.current
const nextOnlineStore: Record<string, true> = {}
@ -2100,16 +2585,8 @@ export default function App() {
const url = pendingMap[kLower]
if (!url) continue
const ok = await startUrl(url, { silent: true })
if (ok) {
// ✅ State + Ref gleichzeitig “synchron” löschen
setPendingAutoStartByKey((prev) => {
const copy = { ...(prev || {}) }
delete copy[kLower]
pendingAutoStartByKeyRef.current = copy
return copy
})
}
// ✅ nicht mehr seriell awaiten, sondern in die Start-Queue
enqueueStart({ url, silent: true, pendingKeyLower: kLower })
}
setLastHeaderUpdateAtMs(Date.now())

View File

@ -144,8 +144,11 @@ async function apiJSON<T>(url: string, init?: RequestInit): Promise<T> {
return res.json() as Promise<T>
}
function postWorkLabel(job: RecordJob): string {
const pw = job.postWork
function postWorkLabel(
job: RecordJob,
override?: { pos?: number; total?: number }
): string {
const pw = (job as any).postWork
if (!pw) return 'Warte auf Nacharbeiten…'
@ -158,24 +161,37 @@ function postWorkLabel(job: RecordJob): string {
}
if (pw.state === 'queued') {
const pos = typeof pw.position === 'number' ? pw.position : 0
const waiting = typeof pw.waiting === 'number' ? pw.waiting : 0
const running = typeof (pw as any).running === 'number' ? (pw as any).running : 0
// Backend-Werte (können was anderes zählen -> deshalb nur Fallback)
const posServer = typeof pw.position === 'number' ? pw.position : 0
const waitingServer = typeof pw.waiting === 'number' ? pw.waiting : 0
const runningServer = typeof (pw as any).running === 'number' ? (pw as any).running : 0
const totalServer = Math.max(waitingServer + runningServer, posServer)
// X = grobe Gesamtmenge (wartend + gerade laufend)
const total = Math.max(waiting + running, pos)
const pos =
typeof override?.pos === 'number' && Number.isFinite(override.pos) && override.pos > 0
? override.pos
: posServer
const total =
typeof override?.total === 'number' && Number.isFinite(override.total) && override.total > 0
? override.total
: totalServer
// Wunschformat: "64 / X"
return pos > 0 && total > 0
? `Warte auf Nacharbeiten… ${pos} / ${total}`
: 'Warte auf Nacharbeiten…'
}
return 'Warte auf Nacharbeiten…'
}
function StatusCell({ job }: { job: RecordJob }) {
function StatusCell({
job,
postworkInfo,
}: {
job: RecordJob
postworkInfo?: { pos?: number; total?: number }
}) {
const phaseRaw = String((job as any)?.phase ?? '').trim()
const progress = Number((job as any)?.progress ?? 0)
@ -186,7 +202,7 @@ function StatusCell({ job }: { job: RecordJob }) {
// ✅ postwork genauer machen (wartend/running + Position)
if (phase === 'postwork') {
phaseText = postWorkLabel(job)
phaseText = postWorkLabel(job, postworkInfo)
}
if (isRecording) {
@ -240,6 +256,7 @@ function DownloadsCardRow({
blurPreviews,
modelsByKey,
stopRequestedIds,
postworkInfoOf,
markStopRequested,
onOpenPlayer,
onStopJob,
@ -252,6 +269,7 @@ function DownloadsCardRow({
blurPreviews?: boolean
modelsByKey: Record<string, { favorite?: boolean; liked?: boolean | null; watching?: boolean }>
stopRequestedIds: Record<string, true>
postworkInfoOf: (job: RecordJob) => { pos?: number; total?: number } | undefined
markStopRequested: (ids: string | string[]) => void
onOpenPlayer: (job: RecordJob) => void
onStopJob: (id: string) => void
@ -368,7 +386,7 @@ function DownloadsCardRow({
if (phaseLower === 'recording') {
phaseText = 'Recording läuft…'
} else if (phaseLower === 'postwork') {
phaseText = postWorkLabel(j)
phaseText = postWorkLabel(j, postworkInfoOf(j))
}
const statusText = rawStatus || 'unknown'
@ -763,6 +781,69 @@ export default function Downloads({
return jobs.some((j) => !j.endedAt && j.status === 'running')
}, [jobs])
const postworkQueueInfoById = useMemo(() => {
const infoById = new Map<string, { pos: number; total: number }>()
const enqueueMsOf = (job: RecordJob): number => {
const anyJ = job as any
const pw = anyJ.postWork
return (
toMs(pw?.enqueuedAt) ||
toMs(anyJ.enqueuedAt) ||
toMs(anyJ.queuedAt) ||
toMs(anyJ.createdAt) ||
toMs(anyJ.addedAt) ||
toMs(job.endedAt) || // Postwork entsteht oft nach endedAt
toMs(job.startedAt) ||
0
)
}
// 1) alle relevanten Postwork-Jobs sammeln (queued + running)
const running: RecordJob[] = []
const queued: RecordJob[] = []
for (const j of jobs) {
const pw = (j as any)?.postWork
if (!pw) continue
const state = String(pw.state ?? '').toLowerCase()
if (state === 'running') running.push(j)
else if (state === 'queued') queued.push(j)
}
// 2) Reihenfolge stabil machen (FIFO)
running.sort((a, b) => enqueueMsOf(a) - enqueueMsOf(b))
queued.sort((a, b) => enqueueMsOf(a) - enqueueMsOf(b))
const runningCount = running.length
const total = runningCount + queued.length
// 3) Positionen setzen: running belegt "vorne", queued danach
for (let i = 0; i < queued.length; i++) {
const id = String((queued[i] as any)?.id ?? '')
if (!id) continue
infoById.set(id, { pos: runningCount + i + 1, total })
}
// optional (wenn du auch bei running "x / total" sehen willst):
// for (let i = 0; i < running.length; i++) {
// const id = String((running[i] as any)?.id ?? '')
// if (!id) continue
// infoById.set(id, { pos: i + 1, total })
// }
return infoById
}, [jobs])
const postworkInfoOf = useCallback(
(job: RecordJob) => {
const id = String((job as any)?.id ?? '')
return id ? postworkQueueInfoById.get(id) : undefined
},
[postworkQueueInfoById]
)
useEffect(() => {
if (!hasActive) return
const t = window.setInterval(() => setNowMs(Date.now()), 15000)
@ -954,7 +1035,7 @@ export default function Downloads({
cell: (r) => {
if (r.kind === 'job') {
const j = r.job
return <StatusCell job={j} />
return <StatusCell job={j} postworkInfo={postworkInfoOf(j)} />
}
const p = r.pending
@ -1073,7 +1154,7 @@ export default function Downloads({
},
},
]
}, [blurPreviews, markStopRequested, modelsByKey, nowMs, onStopJob, onToggleFavorite, onToggleLike, onToggleWatch, stopRequestedIds, stopInitiatedIds])
}, [blurPreviews, markStopRequested, modelsByKey, nowMs, onStopJob, onToggleFavorite, onToggleLike, onToggleWatch, stopRequestedIds, stopInitiatedIds, postworkInfoOf])
const downloadJobRows = useMemo<DownloadRow[]>(() => {
const list = jobs
@ -1197,6 +1278,7 @@ export default function Downloads({
nowMs={nowMs}
blurPreviews={blurPreviews}
modelsByKey={modelsByKey}
postworkInfoOf={postworkInfoOf}
stopRequestedIds={stopRequestedIds}
markStopRequested={markStopRequested}
onOpenPlayer={onOpenPlayer}
@ -1221,6 +1303,7 @@ export default function Downloads({
nowMs={nowMs}
blurPreviews={blurPreviews}
modelsByKey={modelsByKey}
postworkInfoOf={postworkInfoOf}
stopRequestedIds={stopRequestedIds}
markStopRequested={markStopRequested}
onOpenPlayer={onOpenPlayer}
@ -1245,6 +1328,7 @@ export default function Downloads({
nowMs={nowMs}
blurPreviews={blurPreviews}
modelsByKey={modelsByKey}
postworkInfoOf={postworkInfoOf}
stopRequestedIds={stopRequestedIds}
markStopRequested={markStopRequested}
onOpenPlayer={onOpenPlayer}

View File

@ -699,6 +699,9 @@ export default function FinishedDownloads({
// neben deletedKeys / deletingKeys
const [removingKeys, setRemovingKeys] = React.useState<Set<string>>(() => new Set())
// ⏱️ Timer pro Key, damit wir Optimistik bei Fehler sauber zurückrollen können
const removeTimersRef = React.useRef<Map<string, number>>(new Map())
const markRemoving = useCallback((key: string, value: boolean) => {
setRemovingKeys((prev) => {
const next = new Set(prev)
@ -708,21 +711,65 @@ export default function FinishedDownloads({
})
}, [])
const cancelRemoveTimer = useCallback((key: string) => {
const t = removeTimersRef.current.get(key)
if (t != null) {
window.clearTimeout(t)
removeTimersRef.current.delete(key)
}
}, [])
const restoreRow = useCallback(
(key: string) => {
// Timer stoppen (falls die "commit delete"-Phase noch aussteht)
cancelRemoveTimer(key)
// wieder sichtbar machen
setDeletedKeys((prev) => {
const next = new Set(prev)
next.delete(key)
return next
})
setRemovingKeys((prev) => {
const next = new Set(prev)
next.delete(key)
return next
})
setDeletingKeys((prev) => {
const next = new Set(prev)
next.delete(key)
return next
})
setKeepingKeys((prev) => {
const next = new Set(prev)
next.delete(key)
return next
})
},
[cancelRemoveTimer]
)
const animateRemove = useCallback(
(key: string) => {
// 1) rot + fade-out starten
markRemoving(key, true)
// ggf. alten Timer entfernen (wenn mehrfach getriggert)
cancelRemoveTimer(key)
// 2) nach der Animation wirklich ausblenden + Seite auffüllen
window.setTimeout(() => {
const t = window.setTimeout(() => {
removeTimersRef.current.delete(key)
markDeleted(key)
markRemoving(key, false)
// ✅ wichtig: Seite sofort neu laden -> Item rückt nach
queueRefill()
}, 320)
removeTimersRef.current.set(key, t)
},
[markDeleted, markRemoving, queueRefill]
[markDeleted, markRemoving, queueRefill, cancelRemoveTimer]
)
const releasePlayingFile = useCallback(
@ -795,6 +842,9 @@ export default function FinishedDownloads({
return true
} catch (e: any) {
// ✅ falls irgendwo (z.B. via External-Event) schon optimistisch entfernt wurde: zurückrollen
restoreRow(key)
notify.error('Löschen fehlgeschlagen', String(e?.message || e))
return false
} finally {
@ -1060,30 +1110,32 @@ export default function FinishedDownloads({
if (detail.phase === 'start') {
markDeleting(key, true)
// ✅ wenn Cards-View: Swipe schon beim Start raus (ohne Aktion, weil App die API schon macht)
if (view === 'cards') {
window.setTimeout(() => {
markDeleted(key)
}, 320)
} else {
// ✅ Optimistik: überall gleich -> animiert raus
animateRemove(key)
}
} else if (detail.phase === 'error') {
markDeleting(key, false)
// ✅ Swipe zurück, falls Delete fehlgeschlagen
if (view === 'cards') {
swipeRefs.current.get(key)?.reset()
return
}
} else if (detail.phase === 'success') {
if (detail.phase === 'error') {
// ✅ alles zurückrollen -> wieder sichtbar
restoreRow(key)
// ✅ Swipe zurück (nur Cards relevant, schadet sonst aber nicht)
swipeRefs.current.get(key)?.reset()
return
}
if (detail.phase === 'success') {
// delete final bestätigt
markDeleting(key, false)
queueRefill()
return
}
}
window.addEventListener('finished-downloads:delete', onExternalDelete as EventListener)
return () => window.removeEventListener('finished-downloads:delete', onExternalDelete as EventListener)
}, [animateRemove, markDeleting, markDeleted, view, queueRefill])
}, [animateRemove, markDeleting, queueRefill, restoreRow])
useEffect(() => {
const onExternalRename = (ev: Event) => {

View File

@ -250,14 +250,15 @@ export default function LoginPage({ onLoggedIn }: Props) {
<div className="space-y-1">
<label htmlFor="totp" className="text-xs font-medium text-gray-700 dark:text-gray-200">2FA Code</label>
<input
id="totp"
name="totp"
id="id_code"
name="code"
aria-label="totp"
type="text"
value={code}
onChange={(e) => setCode(e.target.value)}
onKeyDown={onEnter}
autoComplete="one-time-code"
required
inputMode="numeric"
pattern="[0-9]*"
maxLength={6}
@ -349,13 +350,14 @@ export default function LoginPage({ onLoggedIn }: Props) {
<label htmlFor="totp" className="text-xs font-medium text-gray-700 dark:text-gray-200">2FA Code (zum Aktivieren)</label>
<input
id="totp-setup"
name="totp"
name="code"
aria-label="totp"
type="text"
value={code}
onChange={(e) => setCode(e.target.value)}
onKeyDown={onEnter}
autoComplete="one-time-code"
required
inputMode="numeric"
pattern="[0-9]*"
maxLength={6}

File diff suppressed because it is too large Load Diff

View File

@ -21,7 +21,7 @@ type RecorderSettings = {
teaserPlayback?: 'still' | 'hover' | 'all'
teaserAudio?: boolean
lowDiskPauseBelowGB?: number
enableNotifications?: boolean
}
type DiskStatus = {
@ -47,6 +47,7 @@ const DEFAULTS: RecorderSettings = {
teaserPlayback: 'hover',
teaserAudio: false,
lowDiskPauseBelowGB: 5,
enableNotifications: true,
}
type Props = {
@ -94,6 +95,7 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
teaserPlayback: (data as any).teaserPlayback ?? DEFAULTS.teaserPlayback,
teaserAudio: (data as any).teaserAudio ?? DEFAULTS.teaserAudio,
lowDiskPauseBelowGB: (data as any).lowDiskPauseBelowGB ?? DEFAULTS.lowDiskPauseBelowGB,
enableNotifications: (data as any).enableNotifications ?? DEFAULTS.enableNotifications,
})
})
.catch(() => {
@ -186,6 +188,7 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
: DEFAULTS.teaserPlayback
const teaserAudio = !!value.teaserAudio
const lowDiskPauseBelowGB = Math.max(1, Math.floor(Number(value.lowDiskPauseBelowGB ?? DEFAULTS.lowDiskPauseBelowGB)))
const enableNotifications = !!value.enableNotifications
setSaving(true)
try {
@ -206,6 +209,7 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
teaserPlayback,
teaserAudio,
lowDiskPauseBelowGB,
enableNotifications,
}),
})
if (!res.ok) {
@ -540,6 +544,13 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
description="Wenn aktiv, werden Vorschau/Teaser nicht stumm geschaltet."
/>
<LabeledSwitch
checked={!!value.enableNotifications}
onChange={(checked) => setValue((v) => ({ ...v, enableNotifications: checked }))}
label="Benachrichtigungen"
description="Wenn aktiv, zeigt das Frontend Toasts (z.B. wenn watched Models online/live gehen oder wenn ein queued Model wieder public wird)."
/>
<div className="rounded-xl border border-gray-200 bg-gray-50 p-3 dark:border-white/10 dark:bg-white/5">
<div className="flex items-start justify-between gap-3">
<div>

View File

@ -1,3 +1,5 @@
// frontend\src\components\ui\ToastProvider.tsx
'use client'
import * as React from 'react'
@ -17,6 +19,8 @@ export type Toast = {
type: ToastType
title?: string
message?: string
imageUrl?: string
imageAlt?: string
durationMs?: number // auto close
}
@ -83,6 +87,36 @@ export function ToastProvider({
position?: 'bottom-right' | 'top-right' | 'bottom-left' | 'top-left'
}) {
const [toasts, setToasts] = React.useState<Toast[]>([])
const [notificationsEnabled, setNotificationsEnabled] = React.useState(true)
const loadNotificationSetting = React.useCallback(async () => {
try {
const r = await fetch('/api/settings', { cache: 'no-store' })
if (!r.ok) return
const data = await r.json()
setNotificationsEnabled(!!(data?.enableNotifications ?? true))
} catch {
// ignorieren -> default true
}
}, [])
React.useEffect(() => {
// initial laden
loadNotificationSetting()
// nach "Speichern" in Settings neu laden
const onUpdated = () => loadNotificationSetting()
window.addEventListener('recorder-settings-updated', onUpdated)
return () => window.removeEventListener('recorder-settings-updated', onUpdated)
}, [loadNotificationSetting])
// optional: wenn deaktiviert, alle aktuellen Toasts ausblenden
React.useEffect(() => {
if (!notificationsEnabled) {
// ✅ Nur nicht-Fehler ausblenden, Fehler dürfen bleiben
setToasts((prev) => prev.filter((t) => t.type === 'error'))
}
}, [notificationsEnabled])
const remove = React.useCallback((id: string) => {
setToasts((prev) => prev.filter((t) => t.id !== id))
@ -92,6 +126,9 @@ export function ToastProvider({
const push = React.useCallback(
(t: Omit<Toast, 'id'>) => {
// ✅ Errors IMMER zeigen, alles andere abhängig vom Toggle
if (!notificationsEnabled && t.type !== 'error') return ''
const id = uid()
const durationMs = t.durationMs ?? defaultDurationMs
@ -106,7 +143,7 @@ export function ToastProvider({
return id
},
[defaultDurationMs, maxToasts, remove]
[defaultDurationMs, maxToasts, remove, notificationsEnabled]
)
const ctx = React.useMemo<ToastContextValue>(() => ({ push, remove, clear }), [push, remove, clear])
@ -148,6 +185,8 @@ export function ToastProvider({
const { Icon, cls } = iconFor(t.type)
const title = (t.title || '').trim() || titleDefault(t.type)
const msg = (t.message || '').trim()
const img = (t.imageUrl || '').trim()
const imgAlt = (t.imageAlt || title).trim()
return (
<Transition key={t.id} appear show={true}>
@ -168,9 +207,24 @@ export function ToastProvider({
>
<div className="p-4">
<div className="flex items-start gap-3">
{img ? (
<div className="shrink-0">
<img
src={img}
alt={imgAlt}
loading="lazy"
referrerPolicy="no-referrer"
className={[
'h-12 w-12 rounded-lg object-cover',
'ring-1 ring-black/10 dark:ring-white/10',
].join(' ')}
/>
</div>
) : (
<div className="shrink-0">
<Icon className={['size-6', cls].join(' ')} aria-hidden="true" />
</div>
)}
<div className="min-w-0 flex-1">
<p className="text-sm font-semibold text-gray-900 dark:text-white">

View File

@ -1,3 +1,5 @@
// frontend\src\main.tsx
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'