This commit is contained in:
Linrador 2026-02-24 18:30:30 +01:00
parent a2891a2cf5
commit 160544a65d
28 changed files with 2286 additions and 877 deletions

View File

@ -1,5 +1,4 @@
// backend\chaturbate_online.go // backend\chaturbate_online.go
package main package main
import ( import (
@ -22,7 +21,6 @@ import (
const chaturbateOnlineRoomsURL = "https://chaturbate.com/affiliates/api/onlinerooms/?format=json&wm=827SM" const chaturbateOnlineRoomsURL = "https://chaturbate.com/affiliates/api/onlinerooms/?format=json&wm=827SM"
// ChaturbateRoom bildet die Felder ab, die die Online-Rooms API liefert. // ChaturbateRoom bildet die Felder ab, die die Online-Rooms API liefert.
// (Du kannst das später problemlos erweitern, wenn du weitere Felder brauchst.)
type ChaturbateRoom struct { type ChaturbateRoom struct {
Gender string `json:"gender"` Gender string `json:"gender"`
Location string `json:"location"` Location string `json:"location"`
@ -75,7 +73,7 @@ type chaturbateCache struct {
LiteByUser map[string]ChaturbateOnlineRoomLite LiteByUser map[string]ChaturbateOnlineRoomLite
FetchedAt time.Time FetchedAt time.Time
LastAttempt time.Time // ✅ wichtig für Bootstrap-Cooldown (siehe Punkt 2) LastAttempt time.Time
LastErr string LastErr string
} }
@ -84,7 +82,7 @@ var (
cbMu sync.RWMutex cbMu sync.RWMutex
cb chaturbateCache cb chaturbateCache
// ✅ Optional: ModelStore, um Tags aus der Online-API zu übernehmen // ✅ Optional: ModelStore, um Tags/Bilder/Status aus der Online-API zu übernehmen
cbModelStore *ModelStore cbModelStore *ModelStore
) )
@ -232,11 +230,16 @@ func indexLiteByUser(rooms []ChaturbateRoom) map[string]ChaturbateOnlineRoomLite
if u == "" { if u == "" {
continue continue
} }
img := strings.TrimSpace(rm.ImageURL360)
if img == "" {
img = strings.TrimSpace(rm.ImageURL)
}
m[u] = ChaturbateOnlineRoomLite{ m[u] = ChaturbateOnlineRoomLite{
Username: rm.Username, Username: rm.Username,
CurrentShow: rm.CurrentShow, CurrentShow: rm.CurrentShow,
ChatRoomURL: rm.ChatRoomURL, ChatRoomURL: rm.ChatRoomURL,
ImageURL: rm.ImageURL, ImageURL: img,
Gender: rm.Gender, Gender: rm.Gender,
Country: rm.Country, Country: rm.Country,
@ -248,26 +251,145 @@ func indexLiteByUser(rooms []ChaturbateRoom) map[string]ChaturbateOnlineRoomLite
return m return m
} }
// startChaturbateOnlinePoller pollt die API alle paar Sekunden, // --- Profilbild Download + Persist (online -> offline) ---
// aber nur, wenn der Settings-Switch "useChaturbateApi" aktiviert ist.
func selectBestRoomImageURL(rm ChaturbateRoom) string {
if v := strings.TrimSpace(rm.ImageURL360); v != "" {
return v
}
if v := strings.TrimSpace(rm.ImageURL); v != "" {
return v
}
return ""
}
func fetchProfileImageBytes(ctx context.Context, rawURL string) (mime string, data []byte, err error) {
u := strings.TrimSpace(rawURL)
if u == "" {
return "", nil, fmt.Errorf("empty image url")
}
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u, nil)
if err != nil {
return "", nil, err
}
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64)")
req.Header.Set("Accept", "image/*,*/*;q=0.8")
resp, err := cbHTTP.Do(req)
if err != nil {
return "", nil, err
}
defer resp.Body.Close()
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
b, _ := io.ReadAll(io.LimitReader(resp.Body, 2048))
return "", nil, fmt.Errorf("image fetch HTTP %d: %s", resp.StatusCode, strings.TrimSpace(string(b)))
}
// Sicherheitslimit (Profilbilder sind klein)
const maxImageBytes = 4 << 20 // 4 MiB
b, err := io.ReadAll(io.LimitReader(resp.Body, maxImageBytes+1))
if err != nil {
return "", nil, err
}
if len(b) == 0 {
return "", nil, fmt.Errorf("empty image body")
}
if len(b) > maxImageBytes {
return "", nil, fmt.Errorf("image too large")
}
ct := strings.TrimSpace(strings.ToLower(resp.Header.Get("Content-Type")))
if i := strings.Index(ct, ";"); i >= 0 {
ct = strings.TrimSpace(ct[:i])
}
return ct, b, nil
}
func persistOfflineTransitions(prevRoomsByUser, newRoomsByUser map[string]ChaturbateRoom, fetchedAt time.Time) {
if cbModelStore == nil || prevRoomsByUser == nil {
return
}
seenAt := fetchedAt.UTC().Format(time.RFC3339Nano)
for userLower, prevRm := range prevRoomsByUser {
// war vorher online und ist jetzt noch online => kein Offline-Transition
if _, stillOnline := newRoomsByUser[userLower]; stillOnline {
continue
}
username := strings.TrimSpace(prevRm.Username)
if username == "" {
username = strings.TrimSpace(userLower)
}
if username == "" {
continue
}
// 1) Offline Status persistieren
_ = cbModelStore.SetLastSeenOnline("chaturbate.com", username, false, seenAt)
// 2) Letztes bekanntes Profilbild persistieren
imgURL := selectBestRoomImageURL(prevRm)
if imgURL == "" {
continue
}
// URL immer merken (Fallback / Diagnose)
_ = cbModelStore.SetProfileImageURLOnly("chaturbate.com", username, imgURL, seenAt)
// Blob speichern (best effort)
ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)
mime, data, err := fetchProfileImageBytes(ctx, imgURL)
cancel()
if err != nil || len(data) == 0 {
continue
}
_ = cbModelStore.SetProfileImage("chaturbate.com", username, imgURL, mime, data, seenAt)
}
}
// cbApplySnapshot ersetzt atomar den Cache-Snapshot und triggert anschließend
// offline-transition persist (best effort, außerhalb des Locks).
func cbApplySnapshot(rooms []ChaturbateRoom) time.Time {
var prevRoomsByUser map[string]ChaturbateRoom
newRoomsByUser := indexRoomsByUser(rooms)
newLiteByUser := indexLiteByUser(rooms)
fetchedAtNow := time.Now()
cbMu.Lock()
if cb.RoomsByUser != nil {
prevRoomsByUser = cb.RoomsByUser
}
cb.LastErr = ""
cb.Rooms = rooms
cb.RoomsByUser = newRoomsByUser
cb.LiteByUser = newLiteByUser
cb.FetchedAt = fetchedAtNow
cbMu.Unlock()
// Offline-Transitions bewusst außerhalb des Locks
if cbModelStore != nil && prevRoomsByUser != nil {
go persistOfflineTransitions(prevRoomsByUser, newRoomsByUser, fetchedAtNow)
}
return fetchedAtNow
}
// startChaturbateOnlinePoller pollt die API alle paar Sekunden, // startChaturbateOnlinePoller pollt die API alle paar Sekunden,
// aber nur, wenn der Settings-Switch "useChaturbateApi" aktiviert ist. // aber nur, wenn der Settings-Switch "useChaturbateApi" aktiviert ist.
func startChaturbateOnlinePoller(store *ModelStore) { func startChaturbateOnlinePoller(store *ModelStore) {
// ✅ etwas langsamer pollen (weniger Last)
const interval = 10 * time.Second const interval = 10 * time.Second
// ✅ Tags-Fill ist teuer -> max alle 10 Minuten
const tagsFillEvery = 10 * time.Minute const tagsFillEvery = 10 * time.Minute
// nur loggen, wenn sich etwas ändert (sonst spammt es)
lastLoggedCount := -1 lastLoggedCount := -1
lastLoggedErr := "" lastLoggedErr := ""
// Tags-Fill Throttle (lokal in der Funktion)
var tagsMu sync.Mutex var tagsMu sync.Mutex
var tagsLast time.Time var tagsLast time.Time
// sofort ein initialer Tick
first := time.NewTimer(0) first := time.NewTimer(0)
defer first.Stop() defer first.Stop()
@ -284,7 +406,7 @@ func startChaturbateOnlinePoller(store *ModelStore) {
continue continue
} }
// immer merken: wir haben es versucht (hilft dem Handler beim Bootstrap-Cooldown) // immer merken: wir haben es versucht
cbMu.Lock() cbMu.Lock()
cb.LastAttempt = time.Now() cb.LastAttempt = time.Now()
cbMu.Unlock() cbMu.Unlock()
@ -293,17 +415,14 @@ func startChaturbateOnlinePoller(store *ModelStore) {
rooms, err := fetchChaturbateOnlineRooms(ctx) rooms, err := fetchChaturbateOnlineRooms(ctx)
cancel() cancel()
cbMu.Lock()
if err != nil { if err != nil {
// ❗bei Fehler NICHT fetchedAt aktualisieren, cbMu.Lock()
// sonst wirkt der Cache "frisch", obwohl rooms alt sind.
cb.LastErr = err.Error() cb.LastErr = err.Error()
// damit offline Models nicht hängen bleiben: Cache leeren // Fehler => Cache leeren (damit offline nicht hängen bleibt)
cb.Rooms = nil cb.Rooms = nil
cb.RoomsByUser = nil cb.RoomsByUser = nil
cb.LiteByUser = nil cb.LiteByUser = nil
cbMu.Unlock() cbMu.Unlock()
if cb.LastErr != lastLoggedErr { if cb.LastErr != lastLoggedErr {
@ -313,16 +432,9 @@ func startChaturbateOnlinePoller(store *ModelStore) {
continue continue
} }
// ✅ Erfolg: komplette Liste ersetzen + indices + fetchedAt setzen _ = cbApplySnapshot(rooms)
cb.LastErr = ""
cb.Rooms = rooms
cb.RoomsByUser = indexRoomsByUser(rooms)
cb.LiteByUser = indexLiteByUser(rooms)
cb.FetchedAt = time.Now()
cbMu.Unlock() // Tags übernehmen ist teuer -> nur selten + im Hintergrund
// ✅ Tags übernehmen ist teuer -> nur selten + im Hintergrund
if cbModelStore != nil && len(rooms) > 0 { if cbModelStore != nil && len(rooms) > 0 {
shouldFill := false shouldFill := false
@ -418,7 +530,7 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
var shows []string var shows []string
// --------------------------- // ---------------------------
// Filter state (muss vor GET/POST da sein) // Filter state
// --------------------------- // ---------------------------
var ( var (
allowedShow map[string]bool allowedShow map[string]bool
@ -449,7 +561,6 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
wantRefresh = req.Refresh wantRefresh = req.Refresh
// ✅ neue Filter übernehmen (POST)
genders := normalizeList(req.Gender) genders := normalizeList(req.Gender)
countries := normalizeList(req.Country) countries := normalizeList(req.Country)
tagsAny := normalizeList(req.TagsAny) tagsAny := normalizeList(req.TagsAny)
@ -461,7 +572,6 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
allowedCountry = toSet(countries) allowedCountry = toSet(countries)
allowedTagsAny = toSet(tagsAny) allowedTagsAny = toSet(tagsAny)
// normalize users
seenU := map[string]bool{} seenU := map[string]bool{}
for _, u := range req.Q { for _, u := range req.Q {
u = strings.ToLower(strings.TrimSpace(u)) u = strings.ToLower(strings.TrimSpace(u))
@ -473,7 +583,6 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
} }
sort.Strings(users) sort.Strings(users)
// normalize shows
seenS := map[string]bool{} seenS := map[string]bool{}
for _, s := range req.Show { for _, s := range req.Show {
s = strings.ToLower(strings.TrimSpace(s)) s = strings.ToLower(strings.TrimSpace(s))
@ -518,28 +627,24 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
sort.Strings(shows) sort.Strings(shows)
} }
// ✅ gender=...
qGender := strings.TrimSpace(r.URL.Query().Get("gender")) qGender := strings.TrimSpace(r.URL.Query().Get("gender"))
if qGender != "" { if qGender != "" {
genders := normalizeList(strings.Split(qGender, ",")) genders := normalizeList(strings.Split(qGender, ","))
allowedGender = toSet(genders) allowedGender = toSet(genders)
} }
// ✅ country=...
qCountry := strings.TrimSpace(r.URL.Query().Get("country")) qCountry := strings.TrimSpace(r.URL.Query().Get("country"))
if qCountry != "" { if qCountry != "" {
countries := normalizeList(strings.Split(qCountry, ",")) countries := normalizeList(strings.Split(qCountry, ","))
allowedCountry = toSet(countries) allowedCountry = toSet(countries)
} }
// ✅ tagsAny=...
qTagsAny := strings.TrimSpace(r.URL.Query().Get("tagsAny")) qTagsAny := strings.TrimSpace(r.URL.Query().Get("tagsAny"))
if qTagsAny != "" { if qTagsAny != "" {
tagsAny := normalizeList(strings.Split(qTagsAny, ",")) tagsAny := normalizeList(strings.Split(qTagsAny, ","))
allowedTagsAny = toSet(tagsAny) allowedTagsAny = toSet(tagsAny)
} }
// ✅ minUsers=123
qMinUsers := strings.TrimSpace(r.URL.Query().Get("minUsers")) qMinUsers := strings.TrimSpace(r.URL.Query().Get("minUsers"))
if qMinUsers != "" { if qMinUsers != "" {
if n, err := strconv.Atoi(qMinUsers); err == nil { if n, err := strconv.Atoi(qMinUsers); err == nil {
@ -547,7 +652,6 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
} }
} }
// ✅ isHD=1/true/yes
qIsHD := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("isHD"))) qIsHD := strings.ToLower(strings.TrimSpace(r.URL.Query().Get("isHD")))
if qIsHD != "" { if qIsHD != "" {
b := (qIsHD == "1" || qIsHD == "true" || qIsHD == "yes") b := (qIsHD == "1" || qIsHD == "true" || qIsHD == "yes")
@ -556,10 +660,6 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
allowedShow = toSet(shows) allowedShow = toSet(shows)
} }
// ---------------------------
// Ultra-wichtig: niemals die komplette Affiliate-Liste ausliefern.
// Wenn keine Users angegeben sind -> leere Antwort (spart massiv CPU + JSON)
// ---------------------------
onlySpecificUsers := len(users) > 0 onlySpecificUsers := len(users) > 0
// --------------------------- // ---------------------------
@ -570,7 +670,6 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
"users="+strings.Join(users, ","), "users="+strings.Join(users, ","),
"show="+strings.Join(keysOfSet(allowedShow), ","), "show="+strings.Join(keysOfSet(allowedShow), ","),
// ✅ neue Filter in den Key!
"gender="+strings.Join(keysOfSet(allowedGender), ","), "gender="+strings.Join(keysOfSet(allowedGender), ","),
"country="+strings.Join(keysOfSet(allowedCountry), ","), "country="+strings.Join(keysOfSet(allowedCountry), ","),
"tagsAny="+strings.Join(keysOfSet(allowedTagsAny), ","), "tagsAny="+strings.Join(keysOfSet(allowedTagsAny), ","),
@ -609,23 +708,20 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
} }
// --------------------------- // ---------------------------
// Snapshot Cache (nur Lite-Index nutzen) // Snapshot Cache lesen (nur Lite)
// --------------------------- // ---------------------------
cbMu.RLock() cbMu.RLock()
fetchedAt := cb.FetchedAt fetchedAt := cb.FetchedAt
lastErr := cb.LastErr lastErr := cb.LastErr
lastAttempt := cb.LastAttempt lastAttempt := cb.LastAttempt
liteByUser := cb.LiteByUser // map[usernameLower]ChaturbateRoomLite liteByUser := cb.LiteByUser
cbMu.RUnlock() cbMu.RUnlock()
// --------------------------- // ---------------------------
// Persist "last seen online/offline" für explizit angefragte User // Persist "last seen online/offline" für explizit angefragte User
// (nur wenn wir einen gültigen Snapshot haben)
// --------------------------- // ---------------------------
if cbModelStore != nil && onlySpecificUsers && liteByUser != nil && !fetchedAt.IsZero() { if cbModelStore != nil && onlySpecificUsers && liteByUser != nil && !fetchedAt.IsZero() {
seenAt := fetchedAt.UTC().Format(time.RFC3339Nano) seenAt := fetchedAt.UTC().Format(time.RFC3339Nano)
// Persistiert den tatsächlichen Snapshot-Status (unabhängig von Filtern)
for _, u := range users { for _, u := range users {
_, isOnline := liteByUser[u] _, isOnline := liteByUser[u]
_ = cbModelStore.SetLastSeenOnline("chaturbate.com", u, isOnline, seenAt) _ = cbModelStore.SetLastSeenOnline("chaturbate.com", u, isOnline, seenAt)
@ -633,17 +729,12 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
} }
// --------------------------- // ---------------------------
// Refresh/Bootstrap-Strategie: // Refresh/Bootstrap-Strategie
// - Handler blockiert NICHT auf Remote-Fetch (Performance!)
// - wenn refresh=true: triggert einen Fetch (best effort), aber liefert sofort Cache/leer zurück
// - wenn Cache noch nie erfolgreich war: "warming up" + best-effort Bootstrap, mit Cooldown
// --------------------------- // ---------------------------
const bootstrapCooldown = 8 * time.Second const bootstrapCooldown = 8 * time.Second
needBootstrap := fetchedAt.IsZero() needBootstrap := fetchedAt.IsZero()
shouldTriggerFetch := shouldTriggerFetch := wantRefresh || (needBootstrap && time.Since(lastAttempt) >= bootstrapCooldown)
wantRefresh ||
(needBootstrap && time.Since(lastAttempt) >= bootstrapCooldown)
if shouldTriggerFetch { if shouldTriggerFetch {
cbRefreshMu.Lock() cbRefreshMu.Lock()
@ -653,12 +744,10 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
cbRefreshInFlight = true cbRefreshInFlight = true
cbRefreshMu.Unlock() cbRefreshMu.Unlock()
// attempt timestamp sofort setzen (damit 100 Requests nicht alle triggern)
cbMu.Lock() cbMu.Lock()
cb.LastAttempt = time.Now() cb.LastAttempt = time.Now()
cbMu.Unlock() cbMu.Unlock()
// ✅ background fetch (nicht blockieren)
go func() { go func() {
defer func() { defer func() {
cbRefreshMu.Lock() cbRefreshMu.Lock()
@ -670,24 +759,20 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
rooms, err := fetchChaturbateOnlineRooms(ctx) rooms, err := fetchChaturbateOnlineRooms(ctx)
cancel() cancel()
cbMu.Lock()
if err != nil { if err != nil {
cbMu.Lock()
cb.LastErr = err.Error() cb.LastErr = err.Error()
cb.Rooms = nil cb.Rooms = nil
cb.RoomsByUser = nil cb.RoomsByUser = nil
cb.LiteByUser = nil cb.LiteByUser = nil
// fetchedAt NICHT ändern (bleibt letzte erfolgreiche Zeit) // fetchedAt NICHT ändern (bleibt letzte erfolgreiche Zeit)
} else { cbMu.Unlock()
cb.LastErr = "" return
cb.Rooms = rooms
cb.RoomsByUser = indexRoomsByUser(rooms)
cb.LiteByUser = indexLiteByUser(rooms) // ✅ kleiner Index für Handler
cb.FetchedAt = time.Now()
} }
cbMu.Unlock()
// Tags optional übernehmen (nur bei Erfolg) _ = cbApplySnapshot(rooms)
if cbModelStore != nil && err == nil && len(rooms) > 0 {
if cbModelStore != nil && len(rooms) > 0 {
cbModelStore.FillMissingTagsFromChaturbateOnline(rooms) cbModelStore.FillMissingTagsFromChaturbateOnline(rooms)
} }
}() }()
@ -741,7 +826,6 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
return true return true
} }
// ✅ total = Anzahl online rooms (gefiltert), ohne sie auszuliefern
total := 0 total := 0
if liteByUser != nil { if liteByUser != nil {
noExtraFilters := noExtraFilters :=
@ -783,7 +867,6 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
} }
} }
// wenn noch nie erfolgreich gefetched: nicer error
if needBootstrap && lastErr == "" { if needBootstrap && lastErr == "" {
lastErr = "warming up" lastErr = "warming up"
} }
@ -797,7 +880,7 @@ func chaturbateOnlineHandler(w http.ResponseWriter, r *http.Request) {
"count": len(outRooms), "count": len(outRooms),
"total": total, "total": total,
"lastError": lastErr, "lastError": lastErr,
"rooms": outRooms, // ✅ klein & schnell "rooms": outRooms,
} }
body, _ := json.Marshal(out) body, _ := json.Marshal(out)

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -327,6 +327,11 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU
// ---------------- // ----------------
// Preview (MP4 teaser clips) // Preview (MP4 teaser clips)
// ---------------- // ----------------
const (
previewClipLenSec = 0.75
previewMaxClips = 12
)
var computedPreviewClips []previewClip var computedPreviewClips []previewClip
if previewBefore { if previewBefore {
@ -346,7 +351,7 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU
progress(thumbsW + 0.05) progress(thumbsW + 0.05)
if err := generateTeaserClipsMP4WithProgress(genCtx, videoPath, previewPath, 0.75, 12, func(r float64) { if err := generateTeaserClipsMP4WithProgress(genCtx, videoPath, previewPath, previewClipLenSec, previewMaxClips, func(r float64) {
if r < 0 { if r < 0 {
r = 0 r = 0
} }
@ -372,16 +377,10 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU
return return
} }
// muss identisch zu generateTeaserClipsMP4WithProgress Defaults sein // exakt dieselben Werte wie beim tatsächlichen Preview-Rendern
opts := TeaserPreviewOptions{ opts := TeaserPreviewOptions{
Segments: 18, Segments: previewMaxClips,
SegmentDuration: 1.0, SegmentDuration: previewClipLenSec,
Width: 640,
Preset: "veryfast",
CRF: 21,
Audio: true,
AudioBitrate: "128k",
UseVsync2: false,
} }
starts, segDur, _ := computeTeaserStarts(meta.durSec, opts) starts, segDur, _ := computeTeaserStarts(meta.durSec, opts)

View File

@ -170,207 +170,6 @@ func probeVideoProps(ctx context.Context, filePath string) (w int, h int, fps fl
return w, h, fps, nil return w, h, fps, nil
} }
func metaJSONPathForAssetID(assetID string) (string, error) {
root, err := generatedMetaRoot()
if err != nil {
return "", err
}
if strings.TrimSpace(root) == "" {
return "", fmt.Errorf("generated/meta root leer")
}
return filepath.Join(root, assetID, "meta.json"), nil
}
func readVideoMetaIfValid(metaPath string, fi os.FileInfo) (*videoMeta, bool) {
b, err := os.ReadFile(metaPath)
if err != nil || len(b) == 0 {
return nil, false
}
var m videoMeta
if err := json.Unmarshal(b, &m); err != nil {
return nil, false
}
// nur akzeptieren wenn Datei identisch (damit wir nicht stale Werte zeigen)
if m.FileSize != fi.Size() || m.FileModUnix != fi.ModTime().Unix() {
return nil, false
}
// Mindestvalidierung
if m.DurationSeconds <= 0 {
return nil, false
}
return &m, true
}
func ensureVideoMetaForFile(ctx context.Context, fullPath string, fi os.FileInfo, sourceURL string) (*videoMeta, bool) {
// assetID aus Dateiname
stem := strings.TrimSuffix(filepath.Base(fullPath), filepath.Ext(fullPath))
assetID := stripHotPrefix(strings.TrimSpace(stem))
if assetID == "" {
return nil, false
}
// sanitize wie bei deinen generated Ordnern
var err error
assetID, err = sanitizeID(assetID)
if err != nil || assetID == "" {
return nil, false
}
metaPath, err := metaJSONPathForAssetID(assetID)
if err != nil {
return nil, false
}
// 1) valid meta vorhanden?
if m, ok := readVideoMetaIfValid(metaPath, fi); ok {
return m, true
}
// 2) sonst neu erzeugen (mit Concurrency-Limit)
if ctx == nil {
ctx = context.Background()
}
cctx, cancel := context.WithTimeout(ctx, 8*time.Second)
defer cancel()
if durSem != nil {
if err := durSem.Acquire(cctx); err != nil {
return nil, false
}
defer durSem.Release()
}
// Dauer
dur, derr := durationSecondsCached(cctx, fullPath)
if derr != nil || dur <= 0 {
return nil, false
}
// Video props
w, h, fps, perr := probeVideoProps(cctx, fullPath)
if perr != nil {
// width/height/fps dürfen 0 bleiben, duration ist aber trotzdem nützlich
w, h, fps = 0, 0, 0
}
// meta dir anlegen
_ = os.MkdirAll(filepath.Dir(metaPath), 0o755)
m := &videoMeta{
Version: 2,
DurationSeconds: dur,
FileSize: fi.Size(),
FileModUnix: fi.ModTime().Unix(),
VideoWidth: w,
VideoHeight: h,
FPS: fps,
Resolution: formatResolution(w, h),
SourceURL: strings.TrimSpace(sourceURL),
UpdatedAtUnix: time.Now().Unix(),
}
b, _ := json.MarshalIndent(m, "", " ")
b = append(b, '\n')
_ = atomicWriteFile(metaPath, b) // best effort
return m, true
}
func attachMetaToJobBestEffort(ctx context.Context, job *RecordJob, fullPath string) {
if job == nil {
return
}
fullPath = strings.TrimSpace(fullPath)
if fullPath == "" {
return
}
// Stat
fi, err := os.Stat(fullPath)
if err != nil || fi == nil || fi.IsDir() {
return
}
// Größe immer mitgeben (macht Sort/Anzeige einfacher)
if job.SizeBytes <= 0 {
job.SizeBytes = fi.Size()
}
// Meta.json lesen/erzeugen (best effort)
m, ok := ensureVideoMetaForFileBestEffort(ctx, fullPath, job.SourceURL)
if !ok || m == nil {
return
}
// Optional: komplettes Meta mitsenden
job.Meta = m
// Und zusätzlich die "Top-Level" Felder befüllen (für Frontend bequem)
if job.DurationSeconds <= 0 && m.DurationSeconds > 0 {
job.DurationSeconds = m.DurationSeconds
}
if job.VideoWidth <= 0 && m.VideoWidth > 0 {
job.VideoWidth = m.VideoWidth
}
if job.VideoHeight <= 0 && m.VideoHeight > 0 {
job.VideoHeight = m.VideoHeight
}
if job.FPS <= 0 && m.FPS > 0 {
job.FPS = m.FPS
}
}
// ensureVideoMetaForFileBestEffort:
// - versucht zuerst echtes Generieren (ffprobe/ffmpeg) via ensureVideoMetaForFile
// - wenn das fehlschlägt, aber durationSecondsCacheOnly schon was weiß:
// schreibt eine Duration-only meta.json, damit wir künftig "aus meta.json" lesen können.
func ensureVideoMetaForFileBestEffort(ctx context.Context, fullPath string, sourceURL string) (*videoMeta, bool) {
fullPath = strings.TrimSpace(fullPath)
if fullPath == "" {
return nil, false
}
fi, err := os.Stat(fullPath)
if err != nil || fi == nil || fi.IsDir() || fi.Size() <= 0 {
return nil, false
}
// 1) Normaler Weg: meta erzeugen/lesen (ffprobe/ffmpeg)
if m, ok := ensureVideoMetaForFile(ctx, fullPath, fi, sourceURL); ok && m != nil {
return m, true
}
// 2) Fallback: wenn wir Duration schon im RAM-Cache haben -> meta.json (Duration-only) persistieren
dur := durationSecondsCacheOnly(fullPath, fi)
if dur <= 0 {
return nil, false
}
stem := strings.TrimSuffix(filepath.Base(fullPath), filepath.Ext(fullPath))
assetID := stripHotPrefix(strings.TrimSpace(stem))
if assetID == "" {
return nil, false
}
metaPath, err := metaJSONPathForAssetID(assetID)
if err != nil || strings.TrimSpace(metaPath) == "" {
return nil, false
}
_ = os.MkdirAll(filepath.Dir(metaPath), 0o755)
_ = writeVideoMetaDuration(metaPath, fi, dur, sourceURL)
// nochmal lesen/validieren
if m, ok := readVideoMetaIfValid(metaPath, fi); ok && m != nil {
return m, true
}
return nil, false
}
func (d *dummyResponseWriter) Header() http.Header { func (d *dummyResponseWriter) Header() http.Header {
if d.h == nil { if d.h == nil {
d.h = make(http.Header) d.h = make(http.Header)

View File

@ -2,6 +2,7 @@
package main package main
import ( import (
"context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"os" "os"
@ -97,6 +98,207 @@ func readVideoMetaDuration(metaPath string, fi os.FileInfo) (float64, bool) {
return m.DurationSeconds, true return m.DurationSeconds, true
} }
func metaJSONPathForAssetID(assetID string) (string, error) {
root, err := generatedMetaRoot()
if err != nil {
return "", err
}
if strings.TrimSpace(root) == "" {
return "", fmt.Errorf("generated/meta root leer")
}
return filepath.Join(root, assetID, "meta.json"), nil
}
func ensureVideoMetaForFile(ctx context.Context, fullPath string, fi os.FileInfo, sourceURL string) (*videoMeta, bool) {
// assetID aus Dateiname
stem := strings.TrimSuffix(filepath.Base(fullPath), filepath.Ext(fullPath))
assetID := stripHotPrefix(strings.TrimSpace(stem))
if assetID == "" {
return nil, false
}
// sanitize wie bei deinen generated Ordnern
var err error
assetID, err = sanitizeID(assetID)
if err != nil || assetID == "" {
return nil, false
}
metaPath, err := metaJSONPathForAssetID(assetID)
if err != nil {
return nil, false
}
// 1) valid meta vorhanden?
if m, ok := readVideoMetaIfValid(metaPath, fi); ok {
return m, true
}
// 2) sonst neu erzeugen (mit Concurrency-Limit)
if ctx == nil {
ctx = context.Background()
}
cctx, cancel := context.WithTimeout(ctx, 8*time.Second)
defer cancel()
if durSem != nil {
if err := durSem.Acquire(cctx); err != nil {
return nil, false
}
defer durSem.Release()
}
// Dauer
dur, derr := durationSecondsCached(cctx, fullPath)
if derr != nil || dur <= 0 {
return nil, false
}
// Video props
w, h, fps, perr := probeVideoProps(cctx, fullPath)
if perr != nil {
// width/height/fps dürfen 0 bleiben, duration ist aber trotzdem nützlich
w, h, fps = 0, 0, 0
}
// meta dir anlegen
_ = os.MkdirAll(filepath.Dir(metaPath), 0o755)
m := &videoMeta{
Version: 2,
DurationSeconds: dur,
FileSize: fi.Size(),
FileModUnix: fi.ModTime().Unix(),
VideoWidth: w,
VideoHeight: h,
FPS: fps,
Resolution: formatResolution(w, h),
SourceURL: strings.TrimSpace(sourceURL),
UpdatedAtUnix: time.Now().Unix(),
}
b, _ := json.MarshalIndent(m, "", " ")
b = append(b, '\n')
_ = atomicWriteFile(metaPath, b) // best effort
return m, true
}
func attachMetaToJobBestEffort(ctx context.Context, job *RecordJob, fullPath string) {
if job == nil {
return
}
fullPath = strings.TrimSpace(fullPath)
if fullPath == "" {
return
}
// Stat
fi, err := os.Stat(fullPath)
if err != nil || fi == nil || fi.IsDir() {
return
}
// Größe immer mitgeben (macht Sort/Anzeige einfacher)
if job.SizeBytes <= 0 {
job.SizeBytes = fi.Size()
}
// Meta.json lesen/erzeugen (best effort)
m, ok := ensureVideoMetaForFileBestEffort(ctx, fullPath, job.SourceURL)
if !ok || m == nil {
return
}
// Optional: komplettes Meta mitsenden
job.Meta = m
// Und zusätzlich die "Top-Level" Felder befüllen (für Frontend bequem)
if job.DurationSeconds <= 0 && m.DurationSeconds > 0 {
job.DurationSeconds = m.DurationSeconds
}
if job.VideoWidth <= 0 && m.VideoWidth > 0 {
job.VideoWidth = m.VideoWidth
}
if job.VideoHeight <= 0 && m.VideoHeight > 0 {
job.VideoHeight = m.VideoHeight
}
if job.FPS <= 0 && m.FPS > 0 {
job.FPS = m.FPS
}
}
// ensureVideoMetaForFileBestEffort:
// - versucht zuerst echtes Generieren (ffprobe/ffmpeg) via ensureVideoMetaForFile
// - wenn das fehlschlägt, aber durationSecondsCacheOnly schon was weiß:
// schreibt eine Duration-only meta.json, damit wir künftig "aus meta.json" lesen können.
func ensureVideoMetaForFileBestEffort(ctx context.Context, fullPath string, sourceURL string) (*videoMeta, bool) {
fullPath = strings.TrimSpace(fullPath)
if fullPath == "" {
return nil, false
}
fi, err := os.Stat(fullPath)
if err != nil || fi == nil || fi.IsDir() || fi.Size() <= 0 {
return nil, false
}
// 1) Normaler Weg: meta erzeugen/lesen (ffprobe/ffmpeg)
if m, ok := ensureVideoMetaForFile(ctx, fullPath, fi, sourceURL); ok && m != nil {
return m, true
}
// 2) Fallback: wenn wir Duration schon im RAM-Cache haben -> meta.json (Duration-only) persistieren
dur := durationSecondsCacheOnly(fullPath, fi)
if dur <= 0 {
return nil, false
}
stem := strings.TrimSuffix(filepath.Base(fullPath), filepath.Ext(fullPath))
assetID := stripHotPrefix(strings.TrimSpace(stem))
if assetID == "" {
return nil, false
}
metaPath, err := metaJSONPathForAssetID(assetID)
if err != nil || strings.TrimSpace(metaPath) == "" {
return nil, false
}
_ = os.MkdirAll(filepath.Dir(metaPath), 0o755)
_ = writeVideoMetaDuration(metaPath, fi, dur, sourceURL)
// nochmal lesen/validieren
if m, ok := readVideoMetaIfValid(metaPath, fi); ok && m != nil {
return m, true
}
return nil, false
}
func readVideoMetaIfValid(metaPath string, fi os.FileInfo) (*videoMeta, bool) {
b, err := os.ReadFile(metaPath)
if err != nil || len(b) == 0 {
return nil, false
}
var m videoMeta
if err := json.Unmarshal(b, &m); err != nil {
return nil, false
}
if m.Version != 1 && m.Version != 2 {
return nil, false
}
if m.FileSize != fi.Size() || m.FileModUnix != fi.ModTime().Unix() {
return nil, false
}
if m.DurationSeconds <= 0 {
return nil, false
}
return &m, true
}
func readVideoMetaSourceURL(metaPath string, fi os.FileInfo) (string, bool) { func readVideoMetaSourceURL(metaPath string, fi os.FileInfo) (string, bool) {
m, ok := readVideoMetaIfValid(metaPath, fi) m, ok := readVideoMetaIfValid(metaPath, fi)
if !ok || m == nil { if !ok || m == nil {
@ -114,6 +316,11 @@ func writeVideoMeta(metaPath string, fi os.FileInfo, dur float64, w int, h int,
if strings.TrimSpace(metaPath) == "" || dur <= 0 { if strings.TrimSpace(metaPath) == "" || dur <= 0 {
return nil return nil
} }
var existing *videoMeta
if old, ok := readVideoMetaIfValid(metaPath, fi); ok && old != nil {
existing = old
}
m := videoMeta{ m := videoMeta{
Version: 2, Version: 2,
DurationSeconds: dur, DurationSeconds: dur,
@ -125,6 +332,15 @@ func writeVideoMeta(metaPath string, fi os.FileInfo, dur float64, w int, h int,
Resolution: formatResolution(w, h), Resolution: formatResolution(w, h),
SourceURL: strings.TrimSpace(sourceURL), SourceURL: strings.TrimSpace(sourceURL),
UpdatedAtUnix: time.Now().Unix(), UpdatedAtUnix: time.Now().Unix(),
// ✅ bestehende Preview-Daten behalten
PreviewClips: nil,
PreviewSprite: nil,
}
if existing != nil {
m.PreviewClips = existing.PreviewClips
m.PreviewSprite = existing.PreviewSprite
} }
buf, err := json.Marshal(m) buf, err := json.Marshal(m)
if err != nil { if err != nil {
@ -138,6 +354,11 @@ func writeVideoMetaWithPreviewClips(metaPath string, fi os.FileInfo, dur float64
if strings.TrimSpace(metaPath) == "" || dur <= 0 { if strings.TrimSpace(metaPath) == "" || dur <= 0 {
return nil return nil
} }
var existing *videoMeta
if old, ok := readVideoMetaIfValid(metaPath, fi); ok && old != nil {
existing = old
}
m := videoMeta{ m := videoMeta{
Version: 2, Version: 2,
DurationSeconds: dur, DurationSeconds: dur,
@ -151,6 +372,11 @@ func writeVideoMetaWithPreviewClips(metaPath string, fi os.FileInfo, dur float64
PreviewClips: clips, PreviewClips: clips,
UpdatedAtUnix: time.Now().Unix(), UpdatedAtUnix: time.Now().Unix(),
} }
// ✅ vorhandenes Sprite (inkl. stepSeconds) nicht wegwerfen
if existing != nil && existing.PreviewSprite != nil {
m.PreviewSprite = existing.PreviewSprite
}
buf, err := json.Marshal(m) buf, err := json.Marshal(m)
if err != nil { if err != nil {
return err return err
@ -189,6 +415,17 @@ func writeVideoMetaWithPreviewClipsAndSprite(
UpdatedAtUnix: time.Now().Unix(), UpdatedAtUnix: time.Now().Unix(),
} }
if sprite == nil {
if old, ok := readVideoMetaIfValid(metaPath, fi); ok && old != nil && old.PreviewSprite != nil {
m.PreviewSprite = old.PreviewSprite
}
}
if len(clips) == 0 {
if old, ok := readVideoMetaIfValid(metaPath, fi); ok && old != nil && len(old.PreviewClips) > 0 {
m.PreviewClips = old.PreviewClips
}
}
buf, err := json.Marshal(m) buf, err := json.Marshal(m)
if err != nil { if err != nil {
return err return err

View File

@ -11,6 +11,7 @@ import (
"net/url" "net/url"
"strconv" "strconv"
"strings" "strings"
"time"
) )
// ✅ umbenannt, damit es nicht mit models.go kollidiert // ✅ umbenannt, damit es nicht mit models.go kollidiert
@ -157,8 +158,8 @@ func importModelsCSV(store *ModelStore, r io.Reader, kind string) (importResult,
} }
get := func(key string) string { get := func(key string) string {
i := idx[key] i, ok := idx[key]
if i < 0 || i >= len(rec) { if !ok || i < 0 || i >= len(rec) {
return "" return ""
} }
return strings.TrimSpace(rec[i]) return strings.TrimSpace(rec[i])
@ -240,18 +241,111 @@ func RegisterModelAPI(mux *http.ServeMux, store *ModelStore) {
}) })
mux.HandleFunc("/api/models/meta", func(w http.ResponseWriter, r *http.Request) { mux.HandleFunc("/api/models/meta", func(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"})
return
}
modelsWriteJSON(w, http.StatusOK, store.Meta()) modelsWriteJSON(w, http.StatusOK, store.Meta())
}) })
mux.HandleFunc("/api/models/watched", func(w http.ResponseWriter, r *http.Request) { mux.HandleFunc("/api/models/watched", func(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"})
return
}
host := strings.TrimSpace(r.URL.Query().Get("host")) host := strings.TrimSpace(r.URL.Query().Get("host"))
modelsWriteJSON(w, http.StatusOK, store.ListWatchedLite(host)) modelsWriteJSON(w, http.StatusOK, store.ListWatchedLite(host))
}) })
mux.HandleFunc("/api/models/list", func(w http.ResponseWriter, r *http.Request) { mux.HandleFunc("/api/models", func(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"})
return
}
modelsWriteJSON(w, http.StatusOK, store.List()) modelsWriteJSON(w, http.StatusOK, store.List())
}) })
// ✅ Profilbild-Blob aus DB ausliefern
mux.HandleFunc("/api/models/image", func(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"})
return
}
id := strings.TrimSpace(r.URL.Query().Get("id"))
if id == "" {
modelsWriteJSON(w, http.StatusBadRequest, map[string]string{"error": "id fehlt"})
return
}
mime, data, ok, err := store.GetProfileImageByID(id)
if err != nil {
modelsWriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
if !ok || len(data) == 0 {
http.NotFound(w, r)
return
}
w.Header().Set("Content-Type", mime)
w.Header().Set("Cache-Control", "public, max-age=86400")
_, _ = w.Write(data)
})
// ✅ Profilbild hochladen/ersetzen (Blob + URL speichern)
mux.HandleFunc("/api/models/profile-image", func(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost {
modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"})
return
}
if err := r.ParseMultipartForm(10 << 20); err != nil {
modelsWriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid multipart form"})
return
}
host := strings.TrimSpace(r.FormValue("host"))
modelKey := strings.TrimSpace(r.FormValue("modelKey"))
sourceURL := strings.TrimSpace(r.FormValue("sourceUrl"))
if modelKey == "" {
modelsWriteJSON(w, http.StatusBadRequest, map[string]string{"error": "modelKey fehlt"})
return
}
f, _, err := r.FormFile("file")
if err != nil {
modelsWriteJSON(w, http.StatusBadRequest, map[string]string{"error": "missing file"})
return
}
defer f.Close()
data, err := io.ReadAll(io.LimitReader(f, 8<<20))
if err != nil || len(data) == 0 {
modelsWriteJSON(w, http.StatusBadRequest, map[string]string{"error": "invalid image"})
return
}
mime := http.DetectContentType(data)
if !strings.HasPrefix(mime, "image/") {
modelsWriteJSON(w, http.StatusBadRequest, map[string]string{"error": "file is not an image"})
return
}
if err := store.SetProfileImage(host, modelKey, sourceURL, mime, data, time.Now().UTC().Format(time.RFC3339Nano)); err != nil {
modelsWriteJSON(w, http.StatusInternalServerError, map[string]string{"error": err.Error()})
return
}
m, err := store.EnsureByHostModelKey(host, modelKey)
if err != nil {
modelsWriteJSON(w, http.StatusOK, map[string]any{"ok": true})
return
}
modelsWriteJSON(w, http.StatusOK, m)
})
mux.HandleFunc("/api/models/upsert", func(w http.ResponseWriter, r *http.Request) { mux.HandleFunc("/api/models/upsert", func(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost { if r.Method != http.MethodPost {
modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"}) modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"})
@ -278,7 +372,6 @@ func RegisterModelAPI(mux *http.ServeMux, store *ModelStore) {
}) })
// ✅ NEU: Ensure-Endpoint (für QuickActions aus FinishedDownloads) // ✅ NEU: Ensure-Endpoint (für QuickActions aus FinishedDownloads)
// Erst versucht er ein bestehendes Model via modelKey zu finden, sonst legt er ein "manual" Model an.
mux.HandleFunc("/api/models/ensure", func(w http.ResponseWriter, r *http.Request) { mux.HandleFunc("/api/models/ensure", func(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodPost { if r.Method != http.MethodPost {
modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"}) modelsWriteJSON(w, http.StatusMethodNotAllowed, map[string]string{"error": "method not allowed"})
@ -379,11 +472,10 @@ func RegisterModelAPI(mux *http.ServeMux, store *ModelStore) {
return return
} }
// ✅ Wenn ein Model weder beobachtet noch favorisiert/geliked ist, fliegt es aus dem Store. // ✅ Cleanup wenn kein relevanter Flag mehr gesetzt ist
// (Damit bleibt der Store „sauber“ und ModelsTab listet nur relevante Einträge.)
likedOn := (m.Liked != nil && *m.Liked) likedOn := (m.Liked != nil && *m.Liked)
if !m.Watching && !m.Favorite && !likedOn { if !m.Watching && !m.Favorite && !likedOn {
_ = store.Delete(m.ID) // best-effort: Patch war erfolgreich, Delete darf hier nicht „fatal“ sein _ = store.Delete(m.ID)
w.WriteHeader(http.StatusNoContent) w.WriteHeader(http.StatusNoContent)
return return
} }

View File

@ -5,6 +5,7 @@ import (
"database/sql" "database/sql"
"encoding/json" "encoding/json"
"errors" "errors"
"net/http"
"net/url" "net/url"
"os" "os"
"path/filepath" "path/filepath"
@ -27,6 +28,10 @@ type StoredModel struct {
LastSeenOnline *bool `json:"lastSeenOnline,omitempty"` // nil = unbekannt LastSeenOnline *bool `json:"lastSeenOnline,omitempty"` // nil = unbekannt
LastSeenOnlineAt string `json:"lastSeenOnlineAt,omitempty"` // RFC3339Nano LastSeenOnlineAt string `json:"lastSeenOnlineAt,omitempty"` // RFC3339Nano
ProfileImageURL string `json:"profileImageUrl,omitempty"`
ProfileImageCached string `json:"profileImageCached,omitempty"` // z.B. /api/models/image?id=...
ProfileImageUpdatedAt string `json:"profileImageUpdatedAt,omitempty"` // RFC3339Nano
Watching bool `json:"watching"` Watching bool `json:"watching"`
Favorite bool `json:"favorite"` Favorite bool `json:"favorite"`
Hot bool `json:"hot"` Hot bool `json:"hot"`
@ -60,9 +65,9 @@ type ParsedModelDTO struct {
} }
type ModelFlagsPatch struct { type ModelFlagsPatch struct {
Host string `json:"host,omitempty"` // ✅ neu Host string `json:"host,omitempty"`
ModelKey string `json:"modelKey,omitempty"` // ✅ wenn id fehlt ModelKey string `json:"modelKey,omitempty"`
ID string `json:"id,omitempty"` // ✅ optional ID string `json:"id,omitempty"`
Watched *bool `json:"watched,omitempty"` Watched *bool `json:"watched,omitempty"`
Favorite *bool `json:"favorite,omitempty"` Favorite *bool `json:"favorite,omitempty"`
@ -149,8 +154,6 @@ ON CONFLICT(id) DO UPDATE SET
// EnsureByModelKey: // EnsureByModelKey:
// - liefert ein bestehendes Model (best match) wenn vorhanden // - liefert ein bestehendes Model (best match) wenn vorhanden
// - sonst legt es ein "manual" Model ohne URL an (Input=modelKey, IsURL=false) // - sonst legt es ein "manual" Model ohne URL an (Input=modelKey, IsURL=false)
// Dadurch funktionieren QuickActions (Like/Favorite) auch bei fertigen Videos,
// bei denen keine SourceURL mehr vorhanden ist.
func (s *ModelStore) EnsureByModelKey(modelKey string) (StoredModel, error) { func (s *ModelStore) EnsureByModelKey(modelKey string) (StoredModel, error) {
if err := s.ensureInit(); err != nil { if err := s.ensureInit(); err != nil {
return StoredModel{}, err return StoredModel{}, err
@ -161,8 +164,6 @@ func (s *ModelStore) EnsureByModelKey(modelKey string) (StoredModel, error) {
return StoredModel{}, errors.New("modelKey fehlt") return StoredModel{}, errors.New("modelKey fehlt")
} }
// Erst schauen ob es das Model schon gibt (egal welcher Host)
// Erst schauen ob es das Model schon gibt (egal welcher Host)
var existingID string var existingID string
err := s.db.QueryRow(` err := s.db.QueryRow(`
SELECT id SELECT id
@ -183,7 +184,6 @@ func (s *ModelStore) EnsureByModelKey(modelKey string) (StoredModel, error) {
return StoredModel{}, err return StoredModel{}, err
} }
// Neu anlegen als "manual" (is_url = 0), input = modelKey (NOT NULL)
now := time.Now().UTC().Format(time.RFC3339Nano) now := time.Now().UTC().Format(time.RFC3339Nano)
id := canonicalID("", key) id := canonicalID("", key)
@ -260,8 +260,7 @@ WHERE lower(trim(host)) = 'chaturbate.com'
} }
// Backwards compatible: // Backwards compatible:
// - wenn du ".json" übergibst (wie aktuell in main.go), wird daraus automatisch ".db" // - wenn du ".json" übergibst, wird daraus automatisch ".db"
// und die JSON-Datei wird als Legacy-Quelle für die 1x Migration genutzt.
func NewModelStore(path string) *ModelStore { func NewModelStore(path string) *ModelStore {
path = strings.TrimSpace(path) path = strings.TrimSpace(path)
@ -271,7 +270,7 @@ func NewModelStore(path string) *ModelStore {
if strings.HasSuffix(lower, ".json") { if strings.HasSuffix(lower, ".json") {
legacy = path legacy = path
dbPath = strings.TrimSuffix(path, filepath.Ext(path)) + ".db" // z.B. models_store.db dbPath = strings.TrimSuffix(path, filepath.Ext(path)) + ".db"
} else if strings.HasSuffix(lower, ".db") || strings.HasSuffix(lower, ".sqlite") || strings.HasSuffix(lower, ".sqlite3") { } else if strings.HasSuffix(lower, ".db") || strings.HasSuffix(lower, ".sqlite") || strings.HasSuffix(lower, ".sqlite3") {
legacy = filepath.Join(filepath.Dir(path), "models_store.json") legacy = filepath.Join(filepath.Dir(path), "models_store.json")
} }
@ -282,8 +281,6 @@ func NewModelStore(path string) *ModelStore {
} }
} }
// main.go ruft aktuell store.Load() auf :contentReference[oaicite:4]{index=4}
// -> wir lassen Load() als Alias für Init() drin.
func (s *ModelStore) Load() error { return s.ensureInit() } func (s *ModelStore) Load() error { return s.ensureInit() }
func (s *ModelStore) ensureInit() error { func (s *ModelStore) ensureInit() error {
@ -305,17 +302,14 @@ func (s *ModelStore) init() error {
if err != nil { if err != nil {
return err return err
} }
// SQLite am besten single-conn im Server-Prozess
db.SetMaxOpenConns(5) db.SetMaxOpenConns(5)
db.SetMaxIdleConns(5) db.SetMaxIdleConns(5)
_, _ = db.Exec(`PRAGMA busy_timeout = 2500;`) _, _ = db.Exec(`PRAGMA busy_timeout = 2500;`)
// Pragmas (einzeln ausführen)
_, _ = db.Exec(`PRAGMA foreign_keys = ON;`) _, _ = db.Exec(`PRAGMA foreign_keys = ON;`)
_, _ = db.Exec(`PRAGMA journal_mode = WAL;`) _, _ = db.Exec(`PRAGMA journal_mode = WAL;`)
_, _ = db.Exec(`PRAGMA synchronous = NORMAL;`) _, _ = db.Exec(`PRAGMA synchronous = NORMAL;`)
// ✅ zuerst Schema/Columns auf "db" erstellen
if err := createModelsSchema(db); err != nil { if err := createModelsSchema(db); err != nil {
_ = db.Close() _ = db.Close()
return err return err
@ -325,17 +319,14 @@ func (s *ModelStore) init() error {
return err return err
} }
// ✅ erst danach in den Store übernehmen
s.db = db s.db = db
// 1x Migration: wenn DB leer ist und Legacy JSON existiert
if s.legacyJSONPath != "" { if s.legacyJSONPath != "" {
if err := s.migrateFromJSONIfEmpty(); err != nil { if err := s.migrateFromJSONIfEmpty(); err != nil {
return err return err
} }
} }
// ✅ beim Einlesen normalisieren
if err := s.normalizeNameOnlyChaturbate(); err != nil { if err := s.normalizeNameOnlyChaturbate(); err != nil {
return err return err
} }
@ -358,6 +349,11 @@ CREATE TABLE IF NOT EXISTS models (
biocontext_json TEXT, biocontext_json TEXT,
biocontext_fetched_at TEXT, biocontext_fetched_at TEXT,
profile_image_url TEXT,
profile_image_mime TEXT,
profile_image_blob BLOB,
profile_image_updated_at TEXT,
last_seen_online INTEGER NULL, -- NULL/0/1 last_seen_online INTEGER NULL, -- NULL/0/1
last_seen_online_at TEXT, last_seen_online_at TEXT,
@ -411,7 +407,7 @@ func ensureModelsColumns(db *sql.DB) error {
} }
} }
// ✅ Biocontext (persistente Bio-Infos) // ✅ Biocontext
if !cols["biocontext_json"] { if !cols["biocontext_json"] {
if _, err := db.Exec(`ALTER TABLE models ADD COLUMN biocontext_json TEXT;`); err != nil { if _, err := db.Exec(`ALTER TABLE models ADD COLUMN biocontext_json TEXT;`); err != nil {
return err return err
@ -423,7 +419,29 @@ func ensureModelsColumns(db *sql.DB) error {
} }
} }
// ✅ Last seen online/offline (persistente Presence-Infos) // ✅ Profile image columns
if !cols["profile_image_url"] {
if _, err := db.Exec(`ALTER TABLE models ADD COLUMN profile_image_url TEXT;`); err != nil {
return err
}
}
if !cols["profile_image_mime"] {
if _, err := db.Exec(`ALTER TABLE models ADD COLUMN profile_image_mime TEXT;`); err != nil {
return err
}
}
if !cols["profile_image_blob"] {
if _, err := db.Exec(`ALTER TABLE models ADD COLUMN profile_image_blob BLOB;`); err != nil {
return err
}
}
if !cols["profile_image_updated_at"] {
if _, err := db.Exec(`ALTER TABLE models ADD COLUMN profile_image_updated_at TEXT;`); err != nil {
return err
}
}
// ✅ Last seen online/offline
if !cols["last_seen_online"] { if !cols["last_seen_online"] {
if _, err := db.Exec(`ALTER TABLE models ADD COLUMN last_seen_online INTEGER NULL;`); err != nil { if _, err := db.Exec(`ALTER TABLE models ADD COLUMN last_seen_online INTEGER NULL;`); err != nil {
return err return err
@ -475,16 +493,6 @@ func ptrLikedFromNull(n sql.NullInt64) *bool {
return &v return &v
} }
func nullBoolToNullInt64(p *bool) sql.NullInt64 {
if p == nil {
return sql.NullInt64{Valid: false}
}
if *p {
return sql.NullInt64{Valid: true, Int64: 1}
}
return sql.NullInt64{Valid: true, Int64: 0}
}
func ptrBoolFromNullInt64(n sql.NullInt64) *bool { func ptrBoolFromNullInt64(n sql.NullInt64) *bool {
if !n.Valid { if !n.Valid {
return nil return nil
@ -493,10 +501,150 @@ func ptrBoolFromNullInt64(n sql.NullInt64) *bool {
return &v return &v
} }
// --- Biocontext Cache (persistente Bio-Infos aus Chaturbate) --- // --- Profile image cache ---
// SetProfileImage speichert Bild-URL + MIME + Blob.
// Legt den Datensatz bei Bedarf minimal an.
func (s *ModelStore) SetProfileImage(host, modelKey, sourceURL, mime string, data []byte, updatedAt string) error {
if err := s.ensureInit(); err != nil {
return err
}
host = canonicalHost(host)
key := strings.TrimSpace(modelKey)
if host == "" || key == "" {
return errors.New("host/modelKey fehlt")
}
if len(data) == 0 {
return errors.New("image data fehlt")
}
src := strings.TrimSpace(sourceURL)
mime = strings.TrimSpace(strings.ToLower(mime))
if mime == "" || mime == "application/octet-stream" {
detected := http.DetectContentType(data)
if strings.TrimSpace(detected) != "" {
mime = detected
}
}
if mime == "" {
mime = "image/jpeg"
}
ts := strings.TrimSpace(updatedAt)
if ts == "" {
ts = time.Now().UTC().Format(time.RFC3339Nano)
}
now := time.Now().UTC().Format(time.RFC3339Nano)
s.mu.Lock()
defer s.mu.Unlock()
// Erst Update versuchen
res, err := s.db.Exec(`
UPDATE models
SET profile_image_url=?, profile_image_mime=?, profile_image_blob=?, profile_image_updated_at=?, updated_at=?
WHERE lower(trim(host)) = lower(trim(?))
AND lower(trim(model_key)) = lower(trim(?));
`, src, mime, data, ts, now, host, key)
if err != nil {
return err
}
aff, _ := res.RowsAffected()
if aff > 0 {
return nil
}
// Kein Auto-Insert: Profilbild nur für bereits bestehende Models speichern.
return nil
}
// SetProfileImageURLOnly speichert nur die letzte bekannte Bild-URL (+Zeit), ohne Blob.
// Praktisch als Fallback, wenn Download fehlschlägt.
func (s *ModelStore) SetProfileImageURLOnly(host, modelKey, sourceURL, updatedAt string) error {
if err := s.ensureInit(); err != nil {
return err
}
host = canonicalHost(host)
key := strings.TrimSpace(modelKey)
src := strings.TrimSpace(sourceURL)
if host == "" || key == "" {
return errors.New("host/modelKey fehlt")
}
if src == "" {
return nil
}
ts := strings.TrimSpace(updatedAt)
if ts == "" {
ts = time.Now().UTC().Format(time.RFC3339Nano)
}
now := time.Now().UTC().Format(time.RFC3339Nano)
s.mu.Lock()
defer s.mu.Unlock()
res, err := s.db.Exec(`
UPDATE models
SET profile_image_url=?, profile_image_updated_at=?, updated_at=?
WHERE lower(trim(host)) = lower(trim(?))
AND lower(trim(model_key)) = lower(trim(?));
`, src, ts, now, host, key)
if err != nil {
return err
}
aff, _ := res.RowsAffected()
if aff > 0 {
return nil
}
// Kein Auto-Insert: Bild-URL nur für bereits bestehende Models speichern.
return nil
}
func (s *ModelStore) GetProfileImageByID(id string) (mime string, data []byte, ok bool, err error) {
if err := s.ensureInit(); err != nil {
return "", nil, false, err
}
id = strings.TrimSpace(id)
if id == "" {
return "", nil, false, errors.New("id fehlt")
}
var mimeNS sql.NullString
var blob []byte
err = s.db.QueryRow(`
SELECT profile_image_mime, profile_image_blob
FROM models
WHERE id = ?
LIMIT 1;
`, id).Scan(&mimeNS, &blob)
if errors.Is(err, sql.ErrNoRows) {
return "", nil, false, nil
}
if err != nil {
return "", nil, false, err
}
if len(blob) == 0 {
return "", nil, false, nil
}
m := strings.TrimSpace(mimeNS.String)
if m == "" {
m = http.DetectContentType(blob)
if m == "" {
m = "application/octet-stream"
}
}
return m, blob, true, nil
}
// --- Biocontext Cache ---
// GetBioContext liefert das zuletzt gespeicherte Biocontext-JSON (+ Zeitstempel).
// ok=false wenn nichts gespeichert ist.
func (s *ModelStore) GetBioContext(host, modelKey string) (jsonStr string, fetchedAt string, ok bool, err error) { func (s *ModelStore) GetBioContext(host, modelKey string) (jsonStr string, fetchedAt string, ok bool, err error) {
if err := s.ensureInit(); err != nil { if err := s.ensureInit(); err != nil {
return "", "", false, err return "", "", false, err
@ -531,8 +679,6 @@ func (s *ModelStore) GetBioContext(host, modelKey string) (jsonStr string, fetch
return val, strings.TrimSpace(ts.String), true, nil return val, strings.TrimSpace(ts.String), true, nil
} }
// SetBioContext speichert/aktualisiert das Biocontext-JSON dauerhaft in der DB.
// Es legt das Model (host+modelKey) bei Bedarf minimal an.
func (s *ModelStore) SetBioContext(host, modelKey, jsonStr, fetchedAt string) error { func (s *ModelStore) SetBioContext(host, modelKey, jsonStr, fetchedAt string) error {
if err := s.ensureInit(); err != nil { if err := s.ensureInit(); err != nil {
return err return err
@ -551,11 +697,11 @@ func (s *ModelStore) SetBioContext(host, modelKey, jsonStr, fetchedAt string) er
defer s.mu.Unlock() defer s.mu.Unlock()
res, err := s.db.Exec(` res, err := s.db.Exec(`
UPDATE models UPDATE models
SET biocontext_json=?, biocontext_fetched_at=?, updated_at=? SET biocontext_json=?, biocontext_fetched_at=?, updated_at=?
WHERE lower(trim(host)) = lower(trim(?)) WHERE lower(trim(host)) = lower(trim(?))
AND lower(trim(model_key)) = lower(trim(?)); AND lower(trim(model_key)) = lower(trim(?));
`, js, ts, now, host, key) `, js, ts, now, host, key)
if err != nil { if err != nil {
return err return err
} }
@ -565,34 +711,11 @@ func (s *ModelStore) SetBioContext(host, modelKey, jsonStr, fetchedAt string) er
return nil return nil
} }
// Model existiert noch nicht -> minimal anlegen (als URL) // Kein Auto-Insert: Biocontext nur für vorhandene Models.
id := canonicalID(host, key) return nil
input := "https://" + host + "/" + key + "/"
path := "/" + key + "/"
_, err = s.db.Exec(`
INSERT INTO models (
id,input,is_url,host,path,model_key,
tags,last_stream,
biocontext_json,biocontext_fetched_at,
watching,favorite,hot,keep,liked,
created_at,updated_at
) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
ON CONFLICT(id) DO UPDATE SET
biocontext_json=excluded.biocontext_json,
biocontext_fetched_at=excluded.biocontext_fetched_at,
updated_at=excluded.updated_at;
`, id, input, int64(1), host, path, key,
"", "",
js, ts,
int64(0), int64(0), int64(0), int64(0), nil,
now, now,
)
return err
} }
// SetLastSeenOnline speichert den zuletzt bekannten Online/Offline-Status (+ Zeit) // SetLastSeenOnline speichert Online/Offline Status
// dauerhaft in der DB. Legt das Model (host+modelKey) bei Bedarf minimal an.
func (s *ModelStore) SetLastSeenOnline(host, modelKey string, online bool, seenAt string) error { func (s *ModelStore) SetLastSeenOnline(host, modelKey string, online bool, seenAt string) error {
if err := s.ensureInit(); err != nil { if err := s.ensureInit(); err != nil {
return err return err
@ -617,13 +740,12 @@ func (s *ModelStore) SetLastSeenOnline(host, modelKey string, online bool, seenA
s.mu.Lock() s.mu.Lock()
defer s.mu.Unlock() defer s.mu.Unlock()
// Erst versuchen, vorhandenes Model zu aktualisieren
res, err := s.db.Exec(` res, err := s.db.Exec(`
UPDATE models UPDATE models
SET last_seen_online=?, last_seen_online_at=?, updated_at=? SET last_seen_online=?, last_seen_online_at=?, updated_at=?
WHERE lower(trim(host)) = lower(trim(?)) WHERE lower(trim(host)) = lower(trim(?))
AND lower(trim(model_key)) = lower(trim(?)); AND lower(trim(model_key)) = lower(trim(?));
`, onlineArg, ts, now, host, key) `, onlineArg, ts, now, host, key)
if err != nil { if err != nil {
return err return err
} }
@ -633,37 +755,12 @@ func (s *ModelStore) SetLastSeenOnline(host, modelKey string, online bool, seenA
return nil return nil
} }
// Falls noch kein Model existiert: minimal anlegen // Wichtig: Keine Auto-Erzeugung durch Online-Poller.
id := canonicalID(host, key) // Nur bereits manuell/importiert vorhandene Models werden aktualisiert.
input := "https://" + host + "/" + key + "/" return nil
path := "/" + key + "/"
_, err = s.db.Exec(`
INSERT INTO models (
id,input,is_url,host,path,model_key,
tags,last_stream,
biocontext_json,biocontext_fetched_at,
last_seen_online,last_seen_online_at,
watching,favorite,hot,keep,liked,
created_at,updated_at
) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
ON CONFLICT(id) DO UPDATE SET
last_seen_online=excluded.last_seen_online,
last_seen_online_at=excluded.last_seen_online_at,
updated_at=excluded.updated_at;
`,
id, input, int64(1), host, path, key,
"", "",
nil, nil,
onlineArg, ts,
int64(0), int64(0), int64(0), int64(0), nil,
now, now,
)
return err
} }
func (s *ModelStore) migrateFromJSONIfEmpty() error { func (s *ModelStore) migrateFromJSONIfEmpty() error {
// DB leer?
var cnt int var cnt int
if err := s.db.QueryRow(`SELECT COUNT(1) FROM models;`).Scan(&cnt); err != nil { if err := s.db.QueryRow(`SELECT COUNT(1) FROM models;`).Scan(&cnt); err != nil {
return err return err
@ -672,7 +769,6 @@ func (s *ModelStore) migrateFromJSONIfEmpty() error {
return nil return nil
} }
// Legacy JSON vorhanden?
b, err := os.ReadFile(s.legacyJSONPath) b, err := os.ReadFile(s.legacyJSONPath)
if err != nil { if err != nil {
if errors.Is(err, os.ErrNotExist) { if errors.Is(err, os.ErrNotExist) {
@ -698,19 +794,22 @@ func (s *ModelStore) migrateFromJSONIfEmpty() error {
} }
defer func() { _ = tx.Rollback() }() defer func() { _ = tx.Rollback() }()
// ✅ FIX: 15 Spalten => 15 Platzhalter
stmt, err := tx.Prepare(` stmt, err := tx.Prepare(`
INSERT INTO models ( INSERT INTO models (
id,input,is_url,host,path,model_key, id,input,is_url,host,path,model_key,
tags,last_stream, tags,last_stream,
watching,favorite,hot,keep,liked, watching,favorite,hot,keep,liked,
created_at,updated_at created_at,updated_at
) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?) ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
ON CONFLICT(id) DO UPDATE SET ON CONFLICT(id) DO UPDATE SET
input=excluded.input, input=excluded.input,
is_url=excluded.is_url, is_url=excluded.is_url,
host=excluded.host, host=excluded.host,
path=excluded.path, path=excluded.path,
model_key=excluded.model_key, model_key=excluded.model_key,
tags=excluded.tags,
last_stream=excluded.last_stream,
updated_at=excluded.updated_at; updated_at=excluded.updated_at;
`) `)
if err != nil { if err != nil {
@ -727,7 +826,6 @@ ON CONFLICT(id) DO UPDATE SET
continue continue
} }
// alte IDs (oft nur modelKey) werden auf host:modelKey normalisiert
id := canonicalID(host, modelKey) id := canonicalID(host, modelKey)
created := strings.TrimSpace(m.CreatedAt) created := strings.TrimSpace(m.CreatedAt)
@ -754,6 +852,8 @@ ON CONFLICT(id) DO UPDATE SET
host, host,
m.Path, m.Path,
modelKey, modelKey,
m.Tags,
m.LastStream,
boolToInt(m.Watching), boolToInt(m.Watching),
boolToInt(m.Favorite), boolToInt(m.Favorite),
boolToInt(m.Hot), boolToInt(m.Hot),
@ -775,7 +875,6 @@ func bytesTrimSpace(b []byte) []byte {
} }
func (s *ModelStore) normalizeNameOnlyChaturbate() error { func (s *ModelStore) normalizeNameOnlyChaturbate() error {
// Kandidaten: is_url=0 UND input==model_key UND host leer oder schon chaturbate
rows, err := s.db.Query(` rows, err := s.db.Query(`
SELECT SELECT
id, model_key, id, model_key,
@ -835,7 +934,6 @@ WHERE is_url = 0
newInput := "https://" + host + "/" + it.key + "/" newInput := "https://" + host + "/" + it.key + "/"
newPath := "/" + it.key + "/" newPath := "/" + it.key + "/"
// Ziel-Datensatz: wenn bereits chaturbate.com:<key> existiert, dorthin mergen
var targetID string var targetID string
err := tx.QueryRow(` err := tx.QueryRow(`
SELECT id SELECT id
@ -859,7 +957,6 @@ LIMIT 1;
likedArg = nil likedArg = nil
} }
// Wenn es keinen Ziel-Datensatz gibt: neu anlegen mit canonical ID
if targetID == "" { if targetID == "" {
targetID = canonicalID(host, it.key) targetID = canonicalID(host, it.key)
@ -880,7 +977,6 @@ INSERT INTO models (
return err return err
} }
} else { } else {
// Ziel existiert: Flags mergen + fehlende Felder auffüllen
_, err = tx.Exec(` _, err = tx.Exec(`
UPDATE models SET UPDATE models SET
input = CASE input = CASE
@ -915,7 +1011,6 @@ WHERE id = ?;
} }
} }
// alten "manual" Datensatz löschen (nur wenn anderer Ziel-Datensatz)
if it.oldID != targetID { if it.oldID != targetID {
if _, err := tx.Exec(`DELETE FROM models WHERE id=?;`, it.oldID); err != nil { if _, err := tx.Exec(`DELETE FROM models WHERE id=?;`, it.oldID); err != nil {
return err return err
@ -932,15 +1027,18 @@ func (s *ModelStore) List() []StoredModel {
} }
rows, err := s.db.Query(` rows, err := s.db.Query(`
SELECT SELECT
id,input,is_url,host,path,model_key, id,input,is_url,host,path,model_key,
tags, COALESCE(last_stream,''), tags, COALESCE(last_stream,''),
last_seen_online, COALESCE(last_seen_online_at,''), last_seen_online, COALESCE(last_seen_online_at,''),
watching,favorite,hot,keep,liked, COALESCE(profile_image_url,''),
created_at,updated_at COALESCE(profile_image_updated_at,''),
FROM models CASE WHEN profile_image_blob IS NOT NULL AND length(profile_image_blob) > 0 THEN 1 ELSE 0 END as has_profile_image,
ORDER BY updated_at DESC; watching,favorite,hot,keep,liked,
`) created_at,updated_at
FROM models
ORDER BY updated_at DESC;
`)
if err != nil { if err != nil {
return []StoredModel{} return []StoredModel{}
} }
@ -950,23 +1048,31 @@ func (s *ModelStore) List() []StoredModel {
for rows.Next() { for rows.Next() {
var ( var (
id, input, host, path, modelKey, tags, lastStream, createdAt, updatedAt string id, input, host, path, modelKey, tags, lastStream string
isURL, watching, favorite, hot, keep int64 createdAt, updatedAt string
liked sql.NullInt64
lastSeenOnline sql.NullInt64 isURL, watching, favorite, hot, keep int64
lastSeenOnlineAt string liked sql.NullInt64
lastSeenOnline sql.NullInt64
lastSeenOnlineAt string
profileImageURL string
profileImageUpdatedAt string
hasProfileImage int64
) )
if err := rows.Scan( if err := rows.Scan(
&id, &input, &isURL, &host, &path, &modelKey, &id, &input, &isURL, &host, &path, &modelKey,
&tags, &lastStream, &tags, &lastStream,
&lastSeenOnline, &lastSeenOnlineAt, &lastSeenOnline, &lastSeenOnlineAt,
&profileImageURL, &profileImageUpdatedAt, &hasProfileImage,
&watching, &favorite, &hot, &keep, &liked, &watching, &favorite, &hot, &keep, &liked,
&createdAt, &updatedAt, &createdAt, &updatedAt,
); err != nil { ); err != nil {
continue continue
} }
out = append(out, StoredModel{ m := StoredModel{
ID: id, ID: id,
Input: input, Input: input,
IsURL: isURL != 0, IsURL: isURL != 0,
@ -984,7 +1090,16 @@ func (s *ModelStore) List() []StoredModel {
Liked: ptrLikedFromNull(liked), Liked: ptrLikedFromNull(liked),
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
})
ProfileImageURL: profileImageURL,
ProfileImageUpdatedAt: profileImageUpdatedAt,
}
if hasProfileImage != 0 {
m.ProfileImageCached = "/api/models/image?id=" + url.QueryEscape(id)
}
out = append(out, m)
} }
return out return out
@ -1085,27 +1200,27 @@ func (s *ModelStore) UpsertFromParsed(p ParsedModelDTO) (StoredModel, error) {
defer s.mu.Unlock() defer s.mu.Unlock()
_, err = s.db.Exec(` _, err = s.db.Exec(`
INSERT INTO models ( INSERT INTO models (
id,input,is_url,host,path,model_key, id,input,is_url,host,path,model_key,
tags,last_stream, tags,last_stream,
watching,favorite,hot,keep,liked, watching,favorite,hot,keep,liked,
created_at,updated_at created_at,updated_at
) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
ON CONFLICT(id) DO UPDATE SET ON CONFLICT(id) DO UPDATE SET
input=excluded.input, input=excluded.input,
is_url=excluded.is_url, is_url=excluded.is_url,
host=excluded.host, host=excluded.host,
path=excluded.path, path=excluded.path,
model_key=excluded.model_key, model_key=excluded.model_key,
updated_at=excluded.updated_at; updated_at=excluded.updated_at;
`, `,
id, id,
u.String(), u.String(),
int64(1), int64(1),
host, host,
p.Path, p.Path,
modelKey, modelKey,
"", "", // ✅ tags, last_stream "", "",
int64(0), int64(0), int64(0), int64(0), nil, int64(0), int64(0), int64(0), int64(0), nil,
now, now,
now, now,
@ -1128,7 +1243,6 @@ func (s *ModelStore) PatchFlags(patch ModelFlagsPatch) (StoredModel, error) {
s.mu.Lock() s.mu.Lock()
defer s.mu.Unlock() defer s.mu.Unlock()
// aktuelle Flags lesen
var ( var (
watching, favorite, hot, keep int64 watching, favorite, hot, keep int64
liked sql.NullInt64 liked sql.NullInt64
@ -1142,28 +1256,21 @@ func (s *ModelStore) PatchFlags(patch ModelFlagsPatch) (StoredModel, error) {
return StoredModel{}, err return StoredModel{}, err
} }
// ✅ watched -> watching (DB)
if patch.Watched != nil { if patch.Watched != nil {
watching = boolToInt(*patch.Watched) watching = boolToInt(*patch.Watched)
} }
if patch.Favorite != nil { if patch.Favorite != nil {
favorite = boolToInt(*patch.Favorite) favorite = boolToInt(*patch.Favorite)
} }
// ✅ liked ist true/false (kein ClearLiked mehr)
if patch.Liked != nil { if patch.Liked != nil {
liked = sql.NullInt64{Valid: true, Int64: boolToInt(*patch.Liked)} liked = sql.NullInt64{Valid: true, Int64: boolToInt(*patch.Liked)}
} }
// ✅ Exklusivität serverseitig (robust): // Exklusivität
// - liked=true => favorite=false
// - favorite=true => liked=false (nicht NULL)
if patch.Liked != nil && *patch.Liked { if patch.Liked != nil && *patch.Liked {
favorite = int64(0) favorite = int64(0)
} }
if patch.Favorite != nil && *patch.Favorite { if patch.Favorite != nil && *patch.Favorite {
// Wenn Frontend nicht explizit liked=true sendet, force liked=false
if patch.Liked == nil || !*patch.Liked { if patch.Liked == nil || !*patch.Liked {
liked = sql.NullInt64{Valid: true, Int64: 0} liked = sql.NullInt64{Valid: true, Int64: 0}
} }
@ -1225,7 +1332,6 @@ func (s *ModelStore) UpsertFromImport(p ParsedModelDTO, tags, lastStream string,
now := time.Now().UTC().Format(time.RFC3339Nano) now := time.Now().UTC().Format(time.RFC3339Nano)
// kind: "favorite" | "liked"
fav := int64(0) fav := int64(0)
var likedArg any = nil var likedArg any = nil
if kind == "favorite" { if kind == "favorite" {
@ -1238,7 +1344,6 @@ func (s *ModelStore) UpsertFromImport(p ParsedModelDTO, tags, lastStream string,
s.mu.Lock() s.mu.Lock()
defer s.mu.Unlock() defer s.mu.Unlock()
// exists?
inserted := false inserted := false
var dummy int var dummy int
err = s.db.QueryRow(`SELECT 1 FROM models WHERE id=? LIMIT 1;`, id).Scan(&dummy) err = s.db.QueryRow(`SELECT 1 FROM models WHERE id=? LIMIT 1;`, id).Scan(&dummy)
@ -1283,11 +1388,17 @@ ON CONFLICT(id) DO UPDATE SET
func (s *ModelStore) getByID(id string) (StoredModel, error) { func (s *ModelStore) getByID(id string) (StoredModel, error) {
var ( var (
input, host, path, modelKey, tags, lastStream, createdAt, updatedAt string input, host, path, modelKey, tags, lastStream string
isURL, watching, favorite, hot, keep int64 createdAt, updatedAt string
liked sql.NullInt64
lastSeenOnlineAt string isURL, watching, favorite, hot, keep int64
lastSeenOnline sql.NullInt64 liked sql.NullInt64
lastSeenOnline sql.NullInt64
lastSeenOnlineAt string
profileImageURL string
profileImageUpdatedAt string
hasProfileImage int64
) )
err := s.db.QueryRow(` err := s.db.QueryRow(`
@ -1295,6 +1406,9 @@ SELECT
input,is_url,host,path,model_key, input,is_url,host,path,model_key,
tags, COALESCE(last_stream,''), tags, COALESCE(last_stream,''),
last_seen_online, COALESCE(last_seen_online_at,''), last_seen_online, COALESCE(last_seen_online_at,''),
COALESCE(profile_image_url,''),
COALESCE(profile_image_updated_at,''),
CASE WHEN profile_image_blob IS NOT NULL AND length(profile_image_blob) > 0 THEN 1 ELSE 0 END as has_profile_image,
watching,favorite,hot,keep,liked, watching,favorite,hot,keep,liked,
created_at,updated_at created_at,updated_at
FROM models FROM models
@ -1303,6 +1417,7 @@ WHERE id=?;
&input, &isURL, &host, &path, &modelKey, &input, &isURL, &host, &path, &modelKey,
&tags, &lastStream, &tags, &lastStream,
&lastSeenOnline, &lastSeenOnlineAt, &lastSeenOnline, &lastSeenOnlineAt,
&profileImageURL, &profileImageUpdatedAt, &hasProfileImage,
&watching, &favorite, &hot, &keep, &liked, &watching, &favorite, &hot, &keep, &liked,
&createdAt, &updatedAt, &createdAt, &updatedAt,
) )
@ -1313,7 +1428,7 @@ WHERE id=?;
return StoredModel{}, err return StoredModel{}, err
} }
return StoredModel{ m := StoredModel{
ID: id, ID: id,
Input: input, Input: input,
IsURL: isURL != 0, IsURL: isURL != 0,
@ -1331,5 +1446,14 @@ WHERE id=?;
Liked: ptrLikedFromNull(liked), Liked: ptrLikedFromNull(liked),
CreatedAt: createdAt, CreatedAt: createdAt,
UpdatedAt: updatedAt, UpdatedAt: updatedAt,
}, nil
ProfileImageURL: profileImageURL,
ProfileImageUpdatedAt: profileImageUpdatedAt,
}
if hasProfileImage != 0 {
m.ProfileImageCached = "/api/models/image?id=" + url.QueryEscape(id)
}
return m, nil
} }

Binary file not shown.

View File

@ -12,6 +12,7 @@ import (
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
"reflect"
"runtime" "runtime"
"sort" "sort"
"strconv" "strconv"
@ -35,15 +36,25 @@ type doneListResponse struct {
PageSize int `json:"pageSize,omitempty"` PageSize int `json:"pageSize,omitempty"`
} }
// previewSpriteMetaResp describes the preview sprite of a finished recording
// as reported to the frontend. Exists is always serialized (explicit
// true/false); the remaining fields are only populated when the sprite file
// is actually present on disk (see previewSpriteTruthForID).
type previewSpriteMetaResp struct {
	Exists bool `json:"exists"` // true when a non-empty preview-sprite.webp exists
	Path string `json:"path,omitempty"` // API path to fetch the sprite image
	Count int `json:"count,omitempty"` // number of frames, best-effort from meta.json
	Cols int `json:"cols,omitempty"` // sprite grid columns, best-effort from meta.json
	Rows int `json:"rows,omitempty"` // sprite grid rows, best-effort from meta.json
	StepSeconds float64 `json:"stepSeconds,omitempty"` // step value from stepSeconds/step/intervalSeconds in meta.json
}
type doneMetaFileResp struct { type doneMetaFileResp struct {
File string `json:"file"` File string `json:"file"`
MetaExists bool `json:"metaExists"` MetaExists bool `json:"metaExists"`
DurationSeconds float64 `json:"durationSeconds,omitempty"` DurationSeconds float64 `json:"durationSeconds,omitempty"`
Width int `json:"width,omitempty"` Width int `json:"width,omitempty"`
Height int `json:"height,omitempty"` Height int `json:"height,omitempty"`
FPS float64 `json:"fps,omitempty"` FPS float64 `json:"fps,omitempty"`
SourceURL string `json:"sourceUrl,omitempty"` SourceURL string `json:"sourceUrl,omitempty"`
Error string `json:"error,omitempty"` PreviewSprite previewSpriteMetaResp `json:"previewSprite"`
Error string `json:"error,omitempty"`
} }
type doneMetaResp struct { type doneMetaResp struct {
@ -121,6 +132,383 @@ func isSafeBasename(name string) bool {
return filepath.Base(name) == name return filepath.Base(name) == name
} }
func intFromAny(v any) (int, bool) {
switch x := v.(type) {
case int:
return x, true
case int8:
return int(x), true
case int16:
return int(x), true
case int32:
return int(x), true
case int64:
return int(x), true
case uint:
return int(x), true
case uint8:
return int(x), true
case uint16:
return int(x), true
case uint32:
return int(x), true
case uint64:
return int(x), true
case float32:
return int(x), true
case float64:
return int(x), true
case json.Number:
if i, err := x.Int64(); err == nil {
return int(i), true
}
if f, err := x.Float64(); err == nil {
return int(f), true
}
case string:
s := strings.TrimSpace(x)
if s == "" {
return 0, false
}
if i, err := strconv.Atoi(s); err == nil {
return i, true
}
}
return 0, false
}
func floatFromAny(v any) (float64, bool) {
switch x := v.(type) {
case float32:
return float64(x), true
case float64:
return x, true
case int:
return float64(x), true
case int8:
return float64(x), true
case int16:
return float64(x), true
case int32:
return float64(x), true
case int64:
return float64(x), true
case uint:
return float64(x), true
case uint8:
return float64(x), true
case uint16:
return float64(x), true
case uint32:
return float64(x), true
case uint64:
return float64(x), true
case json.Number:
if f, err := x.Float64(); err == nil {
return f, true
}
case string:
s := strings.TrimSpace(x)
if s == "" {
return 0, false
}
if f, err := strconv.ParseFloat(s, 64); err == nil {
return f, true
}
}
return 0, false
}
// previewSpriteMetaFileInfo holds the sprite fields parsed from the
// "previewSprite" object of a meta.json file
// (see readPreviewSpriteMetaFromMetaFile). Zero values mean "unknown".
type previewSpriteMetaFileInfo struct {
	Count int // frame count (keys: count / frames / imageCount)
	Cols int // grid columns
	Rows int // grid rows
	StepSeconds float64 // step value (keys: stepSeconds / step / intervalSeconds)
}
// readPreviewSpriteMetaFromMetaFile extracts preview-sprite metadata from a
// meta.json file. It reads the "previewSprite" object and tolerates a few
// legacy key spellings for the frame count and the step duration. The second
// return value is true only when at least a frame count or a complete grid
// (cols AND rows) was found.
func readPreviewSpriteMetaFromMetaFile(metaPath string) (previewSpriteMetaFileInfo, bool) {
	var info previewSpriteMetaFileInfo

	raw, err := os.ReadFile(metaPath)
	if err != nil || len(raw) == 0 {
		return info, false
	}

	// Decode with UseNumber so numeric fields are not forced into float64.
	var doc map[string]any
	dec := json.NewDecoder(strings.NewReader(string(raw)))
	dec.UseNumber()
	if err := dec.Decode(&doc); err != nil {
		return info, false
	}

	sprite, ok := doc["previewSprite"].(map[string]any)
	if !ok || sprite == nil {
		return info, false
	}

	// Frame count: first matching legacy key wins.
	for _, key := range []string{"count", "frames", "imageCount"} {
		if n, ok := intFromAny(sprite[key]); ok && n > 0 {
			info.Count = n
			break
		}
	}
	if n, ok := intFromAny(sprite["cols"]); ok && n > 0 {
		info.Cols = n
	}
	if n, ok := intFromAny(sprite["rows"]); ok && n > 0 {
		info.Rows = n
	}
	// Step duration: first matching legacy key wins.
	for _, key := range []string{"stepSeconds", "step", "intervalSeconds"} {
		if f, ok := floatFromAny(sprite[key]); ok && f > 0 {
			info.StepSeconds = f
			break
		}
	}

	// Valid when at least a count or a full grid is present.
	if info.Count > 0 || (info.Cols > 0 && info.Rows > 0) {
		return info, true
	}
	return info, false
}
// previewSpriteTruthForID returns the authoritative preview-sprite state for a
// recording id by checking the filesystem: the sprite counts as existing only
// when a non-empty preview-sprite.webp sits next to the recording's meta.json.
// Grid/step metadata is filled in best-effort from meta.json.
func previewSpriteTruthForID(id string) previewSpriteMetaResp {
	out := previewSpriteMetaResp{Exists: false}

	// The id is used to build a filesystem path below; reject anything that
	// could escape the generated directory (path separators, "." and "..").
	id = strings.TrimSpace(id)
	if id == "" || id == "." || id == ".." ||
		strings.Contains(id, "/") || strings.Contains(id, "\\") {
		return out
	}

	metaPath, err := generatedMetaFile(id)
	if err != nil || strings.TrimSpace(metaPath) == "" {
		return out
	}
	genDir := filepath.Dir(metaPath)
	spriteFile := filepath.Join(genDir, "preview-sprite.webp")

	// Only a real, non-empty regular file counts as "exists".
	fi, err := os.Stat(spriteFile)
	if err != nil || fi == nil || fi.IsDir() || fi.Size() <= 0 {
		return out
	}

	out.Exists = true
	out.Path = "/api/preview-sprite/" + url.PathEscape(id)

	// Best-effort: enrich with count/grid/step from meta.json.
	if ps, ok := readPreviewSpriteMetaFromMetaFile(metaPath); ok {
		if ps.Count > 0 {
			out.Count = ps.Count
		}
		if ps.Cols > 0 {
			out.Cols = ps.Cols
		}
		if ps.Rows > 0 {
			out.Rows = ps.Rows
		}
		if ps.StepSeconds > 0 {
			out.StepSeconds = ps.StepSeconds
		}
	}

	return out
}
// applyPreviewSpriteTruthToDoneMetaResp stamps the filesystem truth about the
// preview sprite onto a done-meta response. A nil response is a no-op.
func applyPreviewSpriteTruthToDoneMetaResp(id string, resp *doneMetaFileResp) {
	if resp != nil {
		resp.PreviewSprite = previewSpriteTruthForID(id)
	}
}
func metaMapFromAny(v any) map[string]any {
out := map[string]any{}
switch x := v.(type) {
case nil:
return out
case map[string]any:
for k, val := range x {
out[k] = val
}
return out
case string:
s := strings.TrimSpace(x)
if s == "" {
return out
}
var m map[string]any
dec := json.NewDecoder(strings.NewReader(s))
dec.UseNumber()
if err := dec.Decode(&m); err == nil && m != nil {
return m
}
return out
case []byte:
if len(x) == 0 {
return out
}
var m map[string]any
dec := json.NewDecoder(strings.NewReader(string(x)))
dec.UseNumber()
if err := dec.Decode(&m); err == nil && m != nil {
return m
}
return out
case json.RawMessage:
if len(x) == 0 {
return out
}
var m map[string]any
dec := json.NewDecoder(strings.NewReader(string(x)))
dec.UseNumber()
if err := dec.Decode(&m); err == nil && m != nil {
return m
}
return out
default:
// best effort: unbekannten Typ in map re-hydraten
b, err := json.Marshal(x)
if err != nil || len(b) == 0 {
return out
}
var m map[string]any
dec := json.NewDecoder(strings.NewReader(string(b)))
dec.UseNumber()
if err := dec.Decode(&m); err == nil && m != nil {
return m
}
return out
}
}
func setStructFieldJSONMap(fv reflect.Value, m map[string]any) {
if !fv.IsValid() || !fv.CanSet() {
return
}
// JSON serialisieren (für string / []byte / typed map / struct)
b, err := json.Marshal(m)
if err != nil {
return
}
switch fv.Kind() {
case reflect.Interface:
// interface{} / any -> direkt map setzen
fv.Set(reflect.ValueOf(m))
return
case reflect.String:
fv.SetString(string(b))
return
case reflect.Slice:
// []byte / json.RawMessage
if fv.Type().Elem().Kind() == reflect.Uint8 {
fv.SetBytes(b)
return
}
}
// Fallback: in den echten Feldtyp unmarshaln
ptr := reflect.New(fv.Type())
if err := json.Unmarshal(b, ptr.Interface()); err == nil {
fv.Set(ptr.Elem())
}
}
// applyPreviewSpriteTruthToRecordJobMeta overwrites the "previewSprite" entry
// in a job's Meta with the on-disk truth from previewSpriteTruthForID and
// removes the legacy preview-scrubber fields. The Meta field is accessed via
// reflection so the function works no matter how Meta is declared
// (any / string / []byte / json.RawMessage / struct).
func applyPreviewSpriteTruthToRecordJobMeta(j *RecordJob) {
	if j == nil {
		return
	}

	// Derive the canonical recording id from the output path
	// (base name without the HOT prefix and without the extension).
	outPath := strings.TrimSpace(j.Output)
	if outPath == "" {
		return
	}
	base := filepath.Base(outPath)
	id := stripHotPrefix(strings.TrimSuffix(base, filepath.Ext(base)))
	id = strings.TrimSpace(id)

	ps := previewSpriteTruthForID(id)

	// Access the "Meta" field via reflection (robust against Meta's type).
	rv := reflect.ValueOf(j)
	if rv.Kind() != reflect.Pointer || rv.IsNil() {
		return
	}
	sv := rv.Elem()
	if !sv.IsValid() || sv.Kind() != reflect.Struct {
		return
	}
	fv := sv.FieldByName("Meta")
	if !fv.IsValid() || !fv.CanSet() {
		// RecordJob has no settable Meta field -> nothing to do.
		return
	}

	// Read the current raw Meta value; a nil interface stays nil.
	var raw any
	switch fv.Kind() {
	case reflect.Interface:
		if fv.IsNil() {
			raw = nil
		} else {
			raw = fv.Interface()
		}
	default:
		raw = fv.Interface()
	}

	meta := metaMapFromAny(raw)
	if meta == nil {
		// Defensive: metaMapFromAny is expected to return a non-nil map.
		meta = map[string]any{}
	}

	// Drop legacy/fallback fields (if present).
	delete(meta, "previewScrubberPath")
	delete(meta, "previewScrubberCount")

	// Hard-overwrite previewSprite with the real file truth; optional
	// fields are only included when the sprite actually exists.
	psMap := map[string]any{
		"exists": ps.Exists,
	}
	if ps.Exists {
		psMap["path"] = ps.Path
		if ps.Count > 0 {
			psMap["count"] = ps.Count
		}
		if ps.Cols > 0 {
			psMap["cols"] = ps.Cols
		}
		if ps.Rows > 0 {
			psMap["rows"] = ps.Rows
		}
		if ps.StepSeconds > 0 {
			psMap["stepSeconds"] = ps.StepSeconds
		}
	}
	meta["previewSprite"] = psMap

	setStructFieldJSONMap(fv, meta)
}
func recordList(w http.ResponseWriter, r *http.Request) { func recordList(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet { if r.Method != http.MethodGet {
http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed) http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed)
@ -551,6 +939,8 @@ func recordStatus(w http.ResponseWriter, r *http.Request) {
return return
} }
applyPreviewSpriteTruthToRecordJobMeta(job)
json.NewEncoder(w).Encode(job) json.NewEncoder(w).Encode(job)
} }
@ -949,22 +1339,14 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) {
// ✅ best-effort meta.json erzeugen // ✅ best-effort meta.json erzeugen
ensureMetaJSONForPlayback(r.Context(), outPath) ensureMetaJSONForPlayback(r.Context(), outPath)
// Response-Shape: bewusst "fertig" fürs Frontend
type doneMetaFileResp struct {
File string `json:"file"`
MetaExists bool `json:"metaExists"`
DurationSeconds float64 `json:"durationSeconds,omitempty"`
Width int `json:"width,omitempty"`
Height int `json:"height,omitempty"`
FPS float64 `json:"fps,omitempty"`
SourceURL string `json:"sourceUrl,omitempty"`
Error string `json:"error,omitempty"`
}
resp := doneMetaFileResp{File: filepath.Base(outPath)} resp := doneMetaFileResp{File: filepath.Base(outPath)}
// meta lesen (wenn vorhanden) // meta lesen (wenn vorhanden)
id := stripHotPrefix(strings.TrimSuffix(filepath.Base(outPath), filepath.Ext(outPath))) id := stripHotPrefix(strings.TrimSuffix(filepath.Base(outPath), filepath.Ext(outPath)))
// ✅ Preview-Sprite-Truth immer setzen (explizit true/false)
applyPreviewSpriteTruthToDoneMetaResp(id, &resp)
if strings.TrimSpace(id) != "" { if strings.TrimSpace(id) != "" {
if mp, merr := generatedMetaFile(id); merr == nil && strings.TrimSpace(mp) != "" { if mp, merr := generatedMetaFile(id); merr == nil && strings.TrimSpace(mp) != "" {
if mfi, serr := os.Stat(mp); serr == nil && mfi != nil && !mfi.IsDir() && mfi.Size() > 0 { if mfi, serr := os.Stat(mp); serr == nil && mfi != nil && !mfi.IsDir() && mfi.Size() > 0 {
@ -1385,6 +1767,9 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) {
} }
} }
// ✅ Preview-Sprite-Truth im LIST-Payload erzwingen (wichtig für Cards/Gallery)
applyPreviewSpriteTruthToRecordJobMeta(&c)
out = append(out, &c) out = append(out, &c)
} }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -5,8 +5,8 @@
<link rel="icon" type="image/svg+xml" href="/vite.svg" /> <link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" /> <meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" />
<title>App</title> <title>App</title>
<script type="module" crossorigin src="/assets/index-BjA9ZqZd.js"></script> <script type="module" crossorigin src="/assets/index-DNoPI-qJ.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-BZTD4GKM.css"> <link rel="stylesheet" crossorigin href="/assets/index-B-X4TsOo.css">
</head> </head>
<body> <body>
<div id="root"></div> <div id="root"></div>

View File

@ -40,7 +40,7 @@ async function apiJSON<T>(url: string, init?: RequestInit): Promise<T> {
return res.json() as Promise<T> return res.json() as Promise<T>
} }
type RecorderSettings = { type RecorderSettingsState = {
recordDir: string recordDir: string
doneDir: string doneDir: string
ffmpegPath?: string ffmpegPath?: string
@ -56,7 +56,7 @@ type RecorderSettings = {
lowDiskPauseBelowGB?: number lowDiskPauseBelowGB?: number
} }
const DEFAULT_RECORDER_SETTINGS: RecorderSettings = { const DEFAULT_RECORDER_SETTINGS: RecorderSettingsState = {
recordDir: 'records', recordDir: 'records',
doneDir: 'records/done', doneDir: 'records/done',
ffmpegPath: '', ffmpegPath: '',
@ -580,7 +580,7 @@ export default function App() {
const refreshModelsByKey = useCallback(async () => { const refreshModelsByKey = useCallback(async () => {
try { try {
const list = await apiJSON<StoredModel[]>('/api/models/list', { cache: 'no-store' as any }) const list = await apiJSON<StoredModel[]>('/api/models', { cache: 'no-store' as any })
setModelsByKey(buildModelsByKey(Array.isArray(list) ? list : [])) setModelsByKey(buildModelsByKey(Array.isArray(list) ? list : []))
setLastHeaderUpdateAtMs(Date.now()) setLastHeaderUpdateAtMs(Date.now())
} catch { } catch {
@ -656,7 +656,7 @@ export default function App() {
const detail = e?.detail ?? {} const detail = e?.detail ?? {}
const updated = detail?.model const updated = detail?.model
// ✅ 1) Update-Event mit Model: direkt in State übernehmen (KEIN /api/models/list) // ✅ 1) Update-Event mit Model: direkt in State übernehmen (KEIN /api/models)
if (updated && typeof updated === 'object') { if (updated && typeof updated === 'object') {
const k = String(updated.modelKey ?? '').toLowerCase().trim() const k = String(updated.modelKey ?? '').toLowerCase().trim()
if (k) setModelsByKey((prev) => ({ ...prev, [k]: updated })) if (k) setModelsByKey((prev) => ({ ...prev, [k]: updated }))
@ -706,11 +706,12 @@ export default function App() {
const [selectedTab, setSelectedTab] = useState('running') const [selectedTab, setSelectedTab] = useState('running')
const [playerJob, setPlayerJob] = useState<RecordJob | null>(null) const [playerJob, setPlayerJob] = useState<RecordJob | null>(null)
const [playerExpanded, setPlayerExpanded] = useState(false) const [playerExpanded, setPlayerExpanded] = useState(false)
const [playerStartAtSec, setPlayerStartAtSec] = useState<number | null>(null)
const [assetNonce, setAssetNonce] = useState(0) const [assetNonce, setAssetNonce] = useState(0)
const bumpAssets = useCallback(() => setAssetNonce((n) => n + 1), []) const bumpAssets = useCallback(() => setAssetNonce((n) => n + 1), [])
const [recSettings, setRecSettings] = useState<RecorderSettings>(DEFAULT_RECORDER_SETTINGS) const [recSettings, setRecSettings] = useState<RecorderSettingsState>(DEFAULT_RECORDER_SETTINGS)
const recSettingsRef = useRef(recSettings) const recSettingsRef = useRef(recSettings)
useEffect(() => { useEffect(() => {
recSettingsRef.current = recSettings recSettingsRef.current = recSettings
@ -1059,7 +1060,7 @@ export default function App() {
const load = async () => { const load = async () => {
try { try {
const s = await apiJSON<RecorderSettings>('/api/settings', { cache: 'no-store' }) const s = await apiJSON<RecorderSettingsState>('/api/settings', { cache: 'no-store' })
if (!cancelled && s) setRecSettings({ ...DEFAULT_RECORDER_SETTINGS, ...s }) if (!cancelled && s) setRecSettings({ ...DEFAULT_RECORDER_SETTINGS, ...s })
} catch { } catch {
// ignore // ignore
@ -1110,11 +1111,16 @@ export default function App() {
const initialCookies = useMemo(() => Object.entries(cookies).map(([name, value]) => ({ name, value })), [cookies]) const initialCookies = useMemo(() => Object.entries(cookies).map(([name, value]) => ({ name, value })), [cookies])
const openPlayer = useCallback((job: RecordJob) => { const openPlayer = useCallback((job: RecordJob, startAtSec?: number) => {
modelsCacheRef.current = null modelsCacheRef.current = null
setPlayerModel(null) setPlayerModel(null)
setPlayerJob(job) setPlayerJob(job)
setPlayerExpanded(false) setPlayerExpanded(false)
setPlayerStartAtSec(
typeof startAtSec === 'number' && Number.isFinite(startAtSec) && startAtSec >= 0
? startAtSec
: null
)
}, []) }, [])
const runningJobs = jobs.filter((j) => { const runningJobs = jobs.filter((j) => {
@ -1670,7 +1676,7 @@ export default function App() {
return return
} }
}, },
[selectedTab, refreshDoneNow, notify] [notify]
) )
const handleToggleHot = useCallback( const handleToggleHot = useCallback(
@ -2240,7 +2246,7 @@ export default function App() {
return stateHit return stateHit
} }
// ✅ 1) Wenn ensure gewünscht: DIREKT ensure (kein /api/models/list) // ✅ 1) Wenn ensure gewünscht: DIREKT ensure (kein /api/models)
if (wantEnsure) { if (wantEnsure) {
let host: string | undefined let host: string | undefined
@ -2270,7 +2276,7 @@ export default function App() {
if (seeded.length) { if (seeded.length) {
modelsCacheRef.current = { ts: now, list: seeded } modelsCacheRef.current = { ts: now, list: seeded }
} else { } else {
const list = await apiJSON<StoredModel[]>('/api/models/list', { cache: 'no-store' as any }) const list = await apiJSON<StoredModel[]>('/api/models', { cache: 'no-store' as any })
modelsCacheRef.current = { ts: now, list: Array.isArray(list) ? list : [] } modelsCacheRef.current = { ts: now, list: Array.isArray(list) ? list : [] }
} }
} }
@ -2380,7 +2386,7 @@ export default function App() {
window.removeEventListener('hover', kick) window.removeEventListener('hover', kick)
document.removeEventListener('visibilitychange', kick) document.removeEventListener('visibilitychange', kick)
} }
}, [autoAddEnabled, autoStartEnabled, startUrl]) }, [autoAddEnabled, autoStartEnabled, enqueueStart])
useEffect(() => { useEffect(() => {
const stop = startChaturbateOnlinePolling({ const stop = startChaturbateOnlinePolling({
@ -2905,7 +2911,11 @@ export default function App() {
modelsByKey={modelsByKey} modelsByKey={modelsByKey}
expanded={playerExpanded} expanded={playerExpanded}
onToggleExpand={() => setPlayerExpanded((s) => !s)} onToggleExpand={() => setPlayerExpanded((s) => !s)}
onClose={() => setPlayerJob(null)} onClose={() => {
setPlayerJob(null)
setPlayerStartAtSec(null)
}}
startAtSec={playerStartAtSec ?? undefined}
isHot={baseName(playerJob.output || '').startsWith('HOT ')} isHot={baseName(playerJob.output || '').startsWith('HOT ')}
isFavorite={Boolean(playerModel?.favorite)} isFavorite={Boolean(playerModel?.favorite)}
isLiked={playerModel?.liked === true} isLiked={playerModel?.liked === true}

View File

@ -3,7 +3,7 @@
import * as React from 'react' import * as React from 'react'
type Size = 'sm' | 'md' type Size = 'sm' | 'md' | 'lg'
export type ButtonGroupItem = { export type ButtonGroupItem = {
id: string id: string
@ -29,6 +29,7 @@ function cn(...parts: Array<string | false | null | undefined>) {
const sizeMap: Record<Size, { btn: string; icon: string; iconOnly: string }> = { const sizeMap: Record<Size, { btn: string; icon: string; iconOnly: string }> = {
sm: { btn: 'px-2.5 py-1.5 text-sm', icon: 'size-5', iconOnly: 'h-9 w-9' }, sm: { btn: 'px-2.5 py-1.5 text-sm', icon: 'size-5', iconOnly: 'h-9 w-9' },
md: { btn: 'px-3 py-2 text-sm', icon: 'size-5', iconOnly: 'h-10 w-10' }, md: { btn: 'px-3 py-2 text-sm', icon: 'size-5', iconOnly: 'h-10 w-10' },
lg: { btn: 'px-3.5 py-2.5 text-sm', icon: 'size-5', iconOnly: 'h-11 w-11' },
} }
export default function ButtonGroup({ export default function ButtonGroup({

View File

@ -216,7 +216,7 @@ export default function CategoriesTab() {
try { try {
// parallel laden // parallel laden
const [models, doneResp] = await Promise.all([ const [models, doneResp] = await Promise.all([
apiJSON<StoredModel[]>('/api/models/list', { apiJSON<StoredModel[]>('/api/models', {
cache: 'no-store' as any, cache: 'no-store' as any,
signal: ac.signal as any, signal: ac.signal as any,
}), }),

View File

@ -47,7 +47,7 @@ type Props = {
blurPreviews?: boolean blurPreviews?: boolean
teaserPlayback?: TeaserPlaybackMode teaserPlayback?: TeaserPlaybackMode
teaserAudio?: boolean teaserAudio?: boolean
onOpenPlayer: (job: RecordJob) => void onOpenPlayer: (job: RecordJob, startAtSec?: number) => void
onDeleteJob?: ( onDeleteJob?: (
job: RecordJob job: RecordJob
) => void | { undoToken?: string } | Promise<void | { undoToken?: string }> ) => void | { undoToken?: string } | Promise<void | { undoToken?: string }>
@ -748,11 +748,115 @@ export default function FinishedDownloads({
setInlinePlay((prev) => (prev?.key === key ? { key, nonce: prev.nonce + 1 } : { key, nonce: 1 })) setInlinePlay((prev) => (prev?.key === key ? { key, nonce: prev.nonce + 1 } : { key, nonce: 1 }))
}, []) }, [])
  // startInlineAt activates the inline preview for a card and, once the
  // <video> element is mounted, seeks it to `seconds` and tries to autoplay.
  // Negative or non-finite values fall back to 0.
  const startInlineAt = useCallback((key: string, seconds: number, domId: string) => {
    const safeSeconds = Number.isFinite(seconds) && seconds > 0 ? seconds : 0

    // Activate the inline preview / force a remount.
    setInlinePlay((prev) => (prev?.key === key ? { key, nonce: prev.nonce + 1 } : { key, nonce: 1 }))

    // After rendering: find the video, seek it and try to autoplay.
    const trySeekAndPlay = (retriesLeft: number) => {
      const host = document.getElementById(domId)
      const v = host?.querySelector('video') as HTMLVideoElement | null
      if (!v) {
        // Video not mounted yet -> retry on the next animation frame.
        if (retriesLeft > 0) {
          requestAnimationFrame(() => trySeekAndPlay(retriesLeft - 1))
        }
        return
      }

      applyInlineVideoPolicy(v, { muted: previewMuted })

      const applySeek = () => {
        try {
          // Clamp the target slightly before the end of the video.
          const dur = Number(v.duration)
          const maxSeek =
            Number.isFinite(dur) && dur > 0
              ? Math.max(0, dur - 0.05)
              : safeSeconds
          v.currentTime = Math.max(0, Math.min(safeSeconds, maxSeek))
        } catch {
          // ignore
        }
        const p = v.play?.()
        if (p && typeof (p as any).catch === 'function') {
          ;(p as Promise<void>).catch(() => {})
        }
      }

      // Metadata already available -> seek right away.
      if (v.readyState >= 1) {
        applySeek()
        return
      }

      // Otherwise wait until the metadata has loaded.
      const onLoadedMetadata = () => {
        v.removeEventListener('loadedmetadata', onLoadedMetadata)
        applySeek()
      }
      v.addEventListener('loadedmetadata', onLoadedMetadata, { once: true })

      // Additionally try to play immediately (sometimes helps).
      const p = v.play?.()
      if (p && typeof (p as any).catch === 'function') {
        ;(p as Promise<void>).catch(() => {})
      }
    }

    requestAnimationFrame(() => trySeekAndPlay(8))
  }, [previewMuted])
  // openPlayer stops any running inline preview and opens the full player.
  const openPlayer = useCallback((job: RecordJob) => {
    setInlinePlay(null)
    onOpenPlayer(job)
  }, [onOpenPlayer])
  // openPlayerAt opens the full player at a given start position in seconds.
  // Negative or non-finite values fall back to 0.
  const openPlayerAt = useCallback((job: RecordJob, seconds: number) => {
    const s = Number.isFinite(seconds) && seconds >= 0 ? seconds : 0
    setInlinePlay(null)
    onOpenPlayer(job, s)
  }, [onOpenPlayer])
  // handleScrubberClickIndex maps a click on scrubber segment `segmentIndex`
  // (of `segmentCount` segments) to a start time and opens the player there.
  // Falls back to a plain open when the segment count or duration is unknown.
  const handleScrubberClickIndex = useCallback(
    (job: RecordJob, segmentIndex: number, segmentCount: number) => {
      const idx = Number.isFinite(segmentIndex) ? Math.floor(segmentIndex) : 0
      const count = Number.isFinite(segmentCount) ? Math.floor(segmentCount) : 0

      if (count <= 0) {
        // Fallback: open the player normally.
        openPlayer(job)
        return
      }

      // Duration: prefer the preview metadata, else the job itself.
      const k = keyFor(job)
      const durationSec =
        durations[k] ??
        ((job as any)?.durationSeconds as number | undefined) ??
        0

      if (!Number.isFinite(durationSec) || durationSec <= 0) {
        // No known duration: open anyway (without a timestamp).
        openPlayer(job)
        return
      }

      // Segment index -> start second.
      // Example: 10 segments, click on index 0..9.
      const clampedIdx = Math.max(0, Math.min(idx, count - 1))
      const secPerSegment = durationSec / count
      const startAtSec = clampedIdx * secPerSegment

      openPlayerAt(job, startAtSec)
    },
    [durations, keyFor, openPlayer, openPlayerAt]
  )
const markDeleting = useCallback((key: string, value: boolean) => { const markDeleting = useCallback((key: string, value: boolean) => {
setDeletingKeys((prev) => { setDeletingKeys((prev) => {
const next = new Set(prev) const next = new Set(prev)
@ -1944,6 +2048,9 @@ export default function FinishedDownloads({
lower={lower} lower={lower}
onOpenPlayer={onOpenPlayer} onOpenPlayer={onOpenPlayer}
openPlayer={openPlayer} openPlayer={openPlayer}
onOpenPlayerAt={openPlayerAt}
handleScrubberClickIndex={handleScrubberClickIndex}
startInlineAt={startInlineAt}
startInline={startInline} startInline={startInline}
tryAutoplayInline={tryAutoplayInline} tryAutoplayInline={tryAutoplayInline}
registerTeaserHost={registerTeaserHost} registerTeaserHost={registerTeaserHost}
@ -1995,6 +2102,7 @@ export default function FinishedDownloads({
activeTagSet={activeTagSet} activeTagSet={activeTagSet}
onToggleTagFilter={toggleTagFilter} onToggleTagFilter={toggleTagFilter}
onOpenPlayer={onOpenPlayer} onOpenPlayer={onOpenPlayer}
handleScrubberClickIndex={handleScrubberClickIndex}
onSortModeChange={onSortModeChange} onSortModeChange={onSortModeChange}
page={page} page={page}
onPageChange={onPageChange} onPageChange={onPageChange}
@ -2030,6 +2138,7 @@ export default function FinishedDownloads({
deletedKeys={deletedKeys} deletedKeys={deletedKeys}
registerTeaserHost={registerTeaserHost} registerTeaserHost={registerTeaserHost}
onOpenPlayer={onOpenPlayer} onOpenPlayer={onOpenPlayer}
handleScrubberClickIndex={handleScrubberClickIndex}
deleteVideo={deleteVideo} deleteVideo={deleteVideo}
keepVideo={keepVideo} keepVideo={keepVideo}
onToggleHot={toggleHotVideo} onToggleHot={toggleHotVideo}

View File

@ -40,6 +40,8 @@ type Props = {
assetNonce?: number assetNonce?: number
handleScrubberClickIndex: (job: RecordJob, segmentIndex: number, segmentCount: number) => void
// helpers // helpers
keyFor: (j: RecordJob) => string keyFor: (j: RecordJob) => string
baseName: (p: string) => string baseName: (p: string) => string
@ -53,6 +55,8 @@ type Props = {
onHoverPreviewKeyChange?: (key: string | null) => void onHoverPreviewKeyChange?: (key: string | null) => void
onOpenPlayer: (job: RecordJob) => void onOpenPlayer: (job: RecordJob) => void
openPlayer: (job: RecordJob) => void openPlayer: (job: RecordJob) => void
onOpenPlayerAt?: (job: RecordJob, seconds: number) => void
startInlineAt?: (key: string, seconds: number, domId: string) => void
startInline: (key: string) => void startInline: (key: string) => void
tryAutoplayInline: (domId: string) => boolean tryAutoplayInline: (domId: string) => boolean
registerTeaserHost: (key: string) => (el: HTMLDivElement | null) => void registerTeaserHost: (key: string) => (el: HTMLDivElement | null) => void
@ -161,7 +165,7 @@ export default function FinishedDownloadsCardsView({
teaserAudio, teaserAudio,
hoverTeaserKey, hoverTeaserKey,
blurPreviews, blurPreviews,
durations, // ✅ fehlte durations,
teaserKey, teaserKey,
inlinePlay, inlinePlay,
deletingKeys, deletingKeys,
@ -170,6 +174,7 @@ export default function FinishedDownloadsCardsView({
swipeRefs, swipeRefs,
assetNonce, assetNonce,
handleScrubberClickIndex,
keyFor, keyFor,
baseName, baseName,
@ -182,6 +187,7 @@ export default function FinishedDownloadsCardsView({
onHoverPreviewKeyChange, onHoverPreviewKeyChange,
onOpenPlayer, onOpenPlayer,
openPlayer, openPlayer,
startInlineAt,
startInline, startInline,
tryAutoplayInline, tryAutoplayInline,
registerTeaserHost, registerTeaserHost,
@ -276,6 +282,7 @@ export default function FinishedDownloadsCardsView({
) )
const [scrubActiveByKey, setScrubActiveByKey] = React.useState<Record<string, number | undefined>>({}) const [scrubActiveByKey, setScrubActiveByKey] = React.useState<Record<string, number | undefined>>({})
const [scrubHoveringByKey, setScrubHoveringByKey] = React.useState<Record<string, boolean | undefined>>({})
const setScrubActiveIndex = React.useCallback((key: string, index: number | undefined) => { const setScrubActiveIndex = React.useCallback((key: string, index: number | undefined) => {
setScrubActiveByKey((prev) => { setScrubActiveByKey((prev) => {
@ -295,6 +302,19 @@ export default function FinishedDownloadsCardsView({
setScrubActiveIndex(key, undefined) setScrubActiveIndex(key, undefined)
}, [setScrubActiveIndex]) }, [setScrubActiveIndex])
  // setScrubHovering records whether the scrubber of a card (by key) is
  // currently hovered. `undefined` removes the entry; identical values are
  // left untouched to avoid needless re-renders.
  const setScrubHovering = React.useCallback((key: string, hovering: boolean | undefined) => {
    setScrubHoveringByKey((prev) => {
      if (hovering === undefined) {
        if (!(key in prev)) return prev
        const next = { ...prev }
        delete next[key]
        return next
      }
      if (prev[key] === hovering) return prev
      return { ...prev, [key]: hovering }
    })
  }, [])
const renderCardItem = ( const renderCardItem = (
j: RecordJob, j: RecordJob,
opts?: { opts?: {
@ -344,6 +364,7 @@ export default function FinishedDownloadsCardsView({
const meta = parseMeta(j) const meta = parseMeta(j)
const spriteInfo = previewScrubberInfoOf(j) const spriteInfo = previewScrubberInfoOf(j)
const scrubActiveIndex = scrubActiveByKey[k] const scrubActiveIndex = scrubActiveByKey[k]
const scrubHovering = scrubHoveringByKey[k] === true
// ✅ Sprite-Quelle wie in GalleryView (1 Request, danach nur CSS background-position) // ✅ Sprite-Quelle wie in GalleryView (1 Request, danach nur CSS background-position)
const spritePathRaw = firstNonEmptyString( const spritePathRaw = firstNonEmptyString(
@ -437,6 +458,11 @@ export default function FinishedDownloadsCardsView({
const scrubberCount = hasScrubberUi ? spriteCount : 0 const scrubberCount = hasScrubberUi ? spriteCount : 0
const scrubberStepSeconds = hasScrubberUi ? spriteStepSeconds : 0 const scrubberStepSeconds = hasScrubberUi ? spriteStepSeconds : 0
const scrubProgressRatio =
typeof scrubActiveIndex === 'number' && scrubberCount > 1
? clamp(scrubActiveIndex / (scrubberCount - 1), 0, 1)
: undefined
const spriteFrameStyle: React.CSSProperties | undefined = const spriteFrameStyle: React.CSSProperties | undefined =
hasSpriteScrubber && typeof scrubActiveIndex === 'number' hasSpriteScrubber && typeof scrubActiveIndex === 'number'
? (() => { ? (() => {
@ -456,9 +482,6 @@ export default function FinishedDownloadsCardsView({
})() })()
: undefined : undefined
const showScrubberSpriteInThumb = Boolean(spriteFrameStyle)
const hideTeaserUnderOverlay = showScrubberSpriteInThumb
const isHot = isHotName(fileRaw) const isHot = isHotName(fileRaw)
const isFav = Boolean(flags?.favorite) const isFav = Boolean(flags?.favorite)
const isLiked = flags?.liked === true const isLiked = flags?.liked === true
@ -566,7 +589,7 @@ export default function FinishedDownloadsCardsView({
onDuration={handleDuration} onDuration={handleDuration}
showPopover={false} showPopover={false}
blur={inlineActive ? false : Boolean(blurPreviews)} blur={inlineActive ? false : Boolean(blurPreviews)}
animated={hideTeaserUnderOverlay ? false : allowTeaserAnimation} animated={allowTeaserAnimation}
animatedMode="teaser" animatedMode="teaser"
animatedTrigger="always" animatedTrigger="always"
clipSeconds={1} clipSeconds={1}
@ -581,6 +604,8 @@ export default function FinishedDownloadsCardsView({
alwaysLoadStill={forceLoadStill} alwaysLoadStill={forceLoadStill}
teaserPreloadEnabled={opts?.mobileStackTopOnlyVideo ? true : !isSmall} teaserPreloadEnabled={opts?.mobileStackTopOnlyVideo ? true : !isSmall}
teaserPreloadRootMargin={isSmall ? '900px 0px' : '700px 0px'} teaserPreloadRootMargin={isSmall ? '900px 0px' : '700px 0px'}
scrubProgressRatio={scrubProgressRatio}
preferScrubProgress={scrubHovering && typeof scrubActiveIndex === 'number'}
/> />
{/* ✅ Sprite einmal vorladen, damit der erste Scrub-Move sofort sichtbar ist */} {/* ✅ Sprite einmal vorladen, damit der erste Scrub-Move sofort sichtbar ist */}
@ -596,24 +621,72 @@ export default function FinishedDownloadsCardsView({
) : null} ) : null}
{/* ✅ Scrub-Frame Overlay via Sprite (kein Request pro Move) */} {/* ✅ Scrub-Frame Overlay via Sprite (kein Request pro Move) */}
{hasSpriteScrubber && spriteFrameStyle ? ( {hasSpriteScrubber && spriteFrameStyle && !inlineActive ? (
<div className="absolute inset-0 z-[5]" aria-hidden="true"> <div className="absolute inset-x-0 top-0 bottom-[6px] z-[5]" aria-hidden="true">
<div className="h-full w-full" style={spriteFrameStyle} /> <div className="h-full w-full" style={spriteFrameStyle} />
</div> </div>
) : null} ) : null}
{/* ✅ stashapp-artiger Hover-Scrubber (wie GalleryView) */} {/* ✅ stashapp-artiger Hover-Scrubber (wie GalleryView) */}
{!opts?.isDecorative && scrubberCount > 1 ? ( {!opts?.isDecorative && !inlineActive && scrubberCount > 1 ? (
<div <div
className="absolute inset-x-0 bottom-0 z-30 pointer-events-none opacity-100 transition-opacity duration-150" className="absolute inset-x-0 bottom-0 z-30 pointer-events-none opacity-100 transition-opacity duration-150"
onClick={(e) => e.stopPropagation()} onClick={(e) => e.stopPropagation()}
onMouseDown={(e) => e.stopPropagation()} onMouseDown={(e) => e.stopPropagation()}
onMouseEnter={() => setScrubHovering(k, true)}
onMouseLeave={() => {
setScrubHovering(k, false)
// optional: Index sofort loslassen, dann springt Bar direkt zurück auf Teaser
setScrubActiveIndex(k, undefined)
}}
> >
<PreviewScrubber <PreviewScrubber
className="pointer-events-auto px-1" className="pointer-events-auto px-1"
imageCount={scrubberCount} imageCount={scrubberCount}
activeIndex={scrubActiveIndex} activeIndex={scrubActiveIndex}
onActiveIndexChange={(idx) => setScrubActiveIndex(k, idx)} onActiveIndexChange={(idx) => setScrubActiveIndex(k, idx)}
onIndexClick={(index) => {
// wie Preview-Klick: inline starten
if (isSmall || opts?.disableInline) {
// Mobile/Decorative/Fallback: bestehendes Verhalten
handleScrubberClickIndex(j, index, scrubberCount)
return
}
// Zielsekunde aus Scrubber ableiten
const seconds =
scrubberStepSeconds > 0
? index * scrubberStepSeconds
: 0
// 1) bevorzugt: direkt inline an Position starten (falls Parent das unterstützt)
if (startInlineAt) {
startInlineAt(k, seconds, inlineDomId)
// wie bei Tap im Mobile-Stack: Autoplay nochmal anschubsen
requestAnimationFrame(() => {
if (!tryAutoplayInline(inlineDomId)) {
requestAnimationFrame(() => {
tryAutoplayInline(inlineDomId)
})
}
})
return
}
// 2) Fallback: inline normal starten (ohne exakten Seek)
startInline(k)
requestAnimationFrame(() => {
if (!tryAutoplayInline(inlineDomId)) {
requestAnimationFrame(() => {
tryAutoplayInline(inlineDomId)
})
}
})
// 3) Optionaler Fallback auf bestehenden Handler (wenn du dort OpenPlayerAt machst)
// handleScrubberClickIndex(j, index, scrubberCount)
}}
stepSeconds={scrubberStepSeconds} stepSeconds={scrubberStepSeconds}
/> />
</div> </div>

View File

@ -42,6 +42,8 @@ type Props = {
handleDuration: (job: RecordJob, seconds: number) => void handleDuration: (job: RecordJob, seconds: number) => void
handleScrubberClickIndex: (job: RecordJob, segmentIndex: number, segmentCount: number) => void
keyFor: (j: RecordJob) => string keyFor: (j: RecordJob) => string
baseName: (p: string) => string baseName: (p: string) => string
modelNameFromOutput: (output?: string) => string modelNameFromOutput: (output?: string) => string
@ -148,6 +150,7 @@ export default function FinishedDownloadsGalleryView({
teaserKey, teaserKey,
handleDuration, handleDuration,
handleScrubberClickIndex,
keyFor, keyFor,
baseName, baseName,
modelNameFromOutput, modelNameFromOutput,
@ -394,6 +397,11 @@ export default function FinishedDownloadsGalleryView({
const activeScrubIndex = scrubIndexByKey[k] const activeScrubIndex = scrubIndexByKey[k]
const scrubProgressRatio =
typeof activeScrubIndex === 'number' && scrubberCount > 1
? clamp(activeScrubIndex / (scrubberCount - 1), 0, 1)
: undefined
// Sprite-Overlay-Frame (kein Request pro Move) // Sprite-Overlay-Frame (kein Request pro Move)
const spriteFrameStyle: React.CSSProperties | undefined = const spriteFrameStyle: React.CSSProperties | undefined =
hasSpriteScrubber && typeof activeScrubIndex === 'number' hasSpriteScrubber && typeof activeScrubIndex === 'number'
@ -481,6 +489,8 @@ export default function FinishedDownloadsGalleryView({
thumbSamples={18} thumbSamples={18}
muted={previewMuted} muted={previewMuted}
popoverMuted={previewMuted} popoverMuted={previewMuted}
scrubProgressRatio={scrubProgressRatio}
preferScrubProgress={typeof activeScrubIndex === 'number'}
/> />
</div> </div>
@ -498,11 +508,10 @@ export default function FinishedDownloadsGalleryView({
{/* ✅ Scrubber-Frame Overlay (Sprite-first = stashapp-like, kein Request pro Move) */} {/* ✅ Scrubber-Frame Overlay (Sprite-first = stashapp-like, kein Request pro Move) */}
{showScrubberSpriteInThumb && spriteFrameStyle ? ( {showScrubberSpriteInThumb && spriteFrameStyle ? (
<div className="absolute inset-0 z-[5]"> <div className="absolute inset-x-0 top-0 bottom-[6px] z-[5]" aria-hidden="true">
<div <div
className="h-full w-full" className="h-full w-full"
style={spriteFrameStyle} style={spriteFrameStyle}
aria-hidden="true"
/> />
</div> </div>
) : null} ) : null}
@ -526,12 +535,23 @@ export default function FinishedDownloadsGalleryView({
{/* ✅ stashapp-artiger Hover-Scrubber (UI-only) */} {/* ✅ stashapp-artiger Hover-Scrubber (UI-only) */}
{hasScrubber ? ( {hasScrubber ? (
<div className="absolute inset-x-0 bottom-0 z-30 pointer-events-none opacity-100 transition-opacity duration-150"> <div
className="absolute inset-x-0 bottom-0 z-30 pointer-events-none opacity-100 transition-opacity duration-150"
onClick={(e) => e.stopPropagation()}
onMouseDown={(e) => e.stopPropagation()}
>
<PreviewScrubber <PreviewScrubber
className="pointer-events-auto px-1" className="pointer-events-auto px-1"
imageCount={scrubberCount} imageCount={scrubberCount}
activeIndex={activeScrubIndex} activeIndex={activeScrubIndex}
onActiveIndexChange={(idx) => setScrubIndexForKey(k, idx)} onActiveIndexChange={(idx) => setScrubIndexForKey(k, idx)}
onIndexClick={(index) => {
// optional: UI-Zustand direkt sichtbar halten
setScrubIndexForKey(k, index)
// bestehender Handler (Parent entscheidet: openPlayerAt / modal / etc.)
handleScrubberClickIndex(j, index, scrubberCount)
}}
stepSeconds={scrubberStepSeconds} stepSeconds={scrubberStepSeconds}
/> />
</div> </div>

View File

@ -9,7 +9,6 @@ import type { RecordJob } from '../../types'
import FinishedVideoPreview from './FinishedVideoPreview' import FinishedVideoPreview from './FinishedVideoPreview'
import RecordJobActions from './RecordJobActions' import RecordJobActions from './RecordJobActions'
import TagOverflowRow from './TagOverflowRow' import TagOverflowRow from './TagOverflowRow'
import PreviewScrubber from './PreviewScrubber'
import { isHotName, stripHotPrefix } from './hotName' import { isHotName, stripHotPrefix } from './hotName'
import { formatResolution } from './formatters' import { formatResolution } from './formatters'
@ -59,6 +58,7 @@ type Props = {
modelsByKey: Record<string, { favorite?: boolean; liked?: boolean | null; watching?: boolean | null; tags?: string }> modelsByKey: Record<string, { favorite?: boolean; liked?: boolean | null; watching?: boolean | null; tags?: string }>
activeTagSet: Set<string> activeTagSet: Set<string>
onToggleTagFilter: (tag: string) => void onToggleTagFilter: (tag: string) => void
handleScrubberClickIndex: (job: RecordJob, segmentIndex: number, segmentCount: number) => void
// actions // actions
onOpenPlayer: (job: RecordJob) => void onOpenPlayer: (job: RecordJob) => void
@ -274,25 +274,6 @@ export default function FinishedDownloadsTableView({
draggable={false} draggable={false}
/> />
) : null} ) : null}
{/* Scrubber (Mobile sichtbar, Desktop nur Hover) */}
{spriteInfo ? (
<div
className="absolute inset-x-0 bottom-0 z-[20] px-0.5"
onPointerUp={() => setScrubActiveIndex(k, undefined)}
onPointerCancel={() => setScrubActiveIndex(k, undefined)}
onClick={(e) => e.stopPropagation()}
onMouseDown={(e) => e.stopPropagation()}
>
<PreviewScrubber
imageCount={spriteInfo.count}
activeIndex={scrubActiveIndex}
onActiveIndexChange={(idx) => setScrubActiveIndex(k, idx)}
stepSeconds={spriteInfo.stepSeconds}
className="opacity-100 pointer-events-auto md:opacity-0 md:pointer-events-none md:group-hover:opacity-100 md:group-focus-within:opacity-100 md:group-hover:pointer-events-auto md:group-focus-within:pointer-events-auto"
/>
</div>
) : null}
</div> </div>
) )
}, },

View File

@ -76,6 +76,9 @@ export type FinishedVideoPreviewProps = {
/** Vorlade-Zone für Teaser (IntersectionObserver rootMargin) */ /** Vorlade-Zone für Teaser (IntersectionObserver rootMargin) */
teaserPreloadRootMargin?: string teaserPreloadRootMargin?: string
scrubProgressRatio?: number
preferScrubProgress?: boolean
} }
export default function FinishedVideoPreview({ export default function FinishedVideoPreview({
@ -115,6 +118,8 @@ export default function FinishedVideoPreview({
alwaysLoadStill = false, alwaysLoadStill = false,
teaserPreloadEnabled = false, teaserPreloadEnabled = false,
teaserPreloadRootMargin = '700px 0px', teaserPreloadRootMargin = '700px 0px',
scrubProgressRatio,
preferScrubProgress = false,
}: FinishedVideoPreviewProps) { }: FinishedVideoPreviewProps) {
const file = getFileName(job.output || '') const file = getFileName(job.output || '')
const blurCls = blur ? 'blur-md' : '' const blurCls = blur ? 'blur-md' : ''
@ -135,7 +140,15 @@ export default function FinishedVideoPreview({
// ✅ falls job.meta keine previewClips enthält: meta.json nachladen // ✅ falls job.meta keine previewClips enthält: meta.json nachladen
const [fetchedMeta, setFetchedMeta] = useState<any | null>(null) const [fetchedMeta, setFetchedMeta] = useState<any | null>(null)
const metaForPreview = meta ?? fetchedMeta
// ✅ merge statt "meta ?? fetchedMeta"
// job.meta bleibt Basis, fetchedMeta ergänzt fehlende Felder (z.B. previewClips)
const metaForPreview = useMemo(() => {
if (!meta && !fetchedMeta) return null
if (!meta) return fetchedMeta
if (!fetchedMeta) return meta
return { ...meta, ...fetchedMeta }
}, [meta, fetchedMeta])
const [progressMountTick, setProgressMountTick] = useState(0) const [progressMountTick, setProgressMountTick] = useState(0)
@ -352,7 +365,7 @@ export default function FinishedVideoPreview({
// Ratio basiert auf vvDur (z.B. 2/18) — unabhängig von totalSeconds. // Ratio basiert auf vvDur (z.B. 2/18) — unabhängig von totalSeconds.
const readProgressStepped = ( const readProgressStepped = (
vv: HTMLVideoElement | null, vv: HTMLVideoElement | null,
totalSeconds: number | undefined, // bleibt drin (nur für clamp/teaser-end) totalSeconds: number | undefined,
stepSec = clipSeconds, stepSec = clipSeconds,
forceTeaserMap = false forceTeaserMap = false
): { ratio: number; globalSec: number; vvDur: number } => { ): { ratio: number; globalSec: number; vvDur: number } => {
@ -365,23 +378,21 @@ export default function FinishedVideoPreview({
const tPreview = Number(vv.currentTime) const tPreview = Number(vv.currentTime)
if (!Number.isFinite(tPreview) || tPreview < 0) return { ratio: 0, globalSec: 0, vvDur } if (!Number.isFinite(tPreview) || tPreview < 0) return { ratio: 0, globalSec: 0, vvDur }
let globalSec = 0
const m = previewClipMap const m = previewClipMap
let globalSec = 0
if (forceTeaserMap && Array.isArray(m) && m.length > 0) { if (forceTeaserMap && Array.isArray(m) && m.length > 0) {
const last = m[m.length - 1] const last = m[m.length - 1]
// Ende -> global = totalSeconds (falls bekannt), sonst Segment-Ende
if (tPreview >= last.cumEnd) { if (tPreview >= last.cumEnd) {
globalSec = globalSec =
typeof totalSeconds === 'number' && Number.isFinite(totalSeconds) && totalSeconds > 0 typeof totalSeconds === 'number' && Number.isFinite(totalSeconds) && totalSeconds > 0
? totalSeconds ? totalSeconds
: last.start + last.dur : last.start + last.dur
} else { } else {
// Segment finden
let lo = 0 let lo = 0
let hi = m.length - 1 let hi = m.length - 1
let seg = m[0] let segIdx = 0
while (lo <= hi) { while (lo <= hi) {
const mid = (lo + hi) >> 1 const mid = (lo + hi) >> 1
@ -389,24 +400,31 @@ export default function FinishedVideoPreview({
if (tPreview < c.cumStart) hi = mid - 1 if (tPreview < c.cumStart) hi = mid - 1
else if (tPreview >= c.cumEnd) lo = mid + 1 else if (tPreview >= c.cumEnd) lo = mid + 1
else { else {
seg = c segIdx = mid
break break
} }
} }
const within = Math.max(0, tPreview - seg.cumStart) const seg = m[segIdx]
const snapped = Math.floor(within / stepSec) * stepSec globalSec = seg.start
globalSec = seg.start + Math.min(snapped, seg.dur)
// ✅ Sichtbarer Teaser-Progress exakt am Segmentindex ausrichten
// springt auf Beginn des aktuell aktiven Preview-Segments (kein floor-lag über currentTime)
const ratio = m.length > 0 ? clamp01(segIdx / m.length) : 0
return { ratio, globalSec: Math.max(0, globalSec), vvDur }
} }
} else {
// inline/clips: global = currentTime (gesnappt)
globalSec = Math.floor(tPreview / stepSec) * stepSec
} }
// ✅ Balken-Ratio basiert auf vvDur // inline/clips fallback
const g = Math.max(0, Math.min(globalSec, vvDur)) if (Number.isFinite(stepSec) && stepSec > 0) {
const ratio = clamp01(g / vvDur) globalSec = Math.floor(tPreview / stepSec) * stepSec
return { ratio, globalSec: g, vvDur } } else {
globalSec = tPreview
}
const ratio = clamp01(Math.min(globalSec, vvDur) / vvDur)
return { ratio, globalSec: Math.max(0, globalSec), vvDur }
} }
const hardStop = (v: HTMLVideoElement | null) => { const hardStop = (v: HTMLVideoElement | null) => {
@ -561,7 +579,8 @@ export default function FinishedVideoPreview({
(animatedTrigger === 'always' || hovered) && (animatedTrigger === 'always' || hovered) &&
((animatedMode === 'teaser' && teaserOk && Boolean(teaserSrc)) || (animatedMode === 'clips' && hasDuration)) ((animatedMode === 'teaser' && teaserOk && Boolean(teaserSrc)) || (animatedMode === 'clips' && hasDuration))
const progressTotalSeconds = hasDuration ? effectiveDurationSec : undefined const progressTotalSeconds =
hasDuration && typeof effectiveDurationSec === 'number' ? effectiveDurationSec : undefined
// ✅ Still-Bild: optional immer laden (entkoppelt vom inView-Gating) // ✅ Still-Bild: optional immer laden (entkoppelt vom inView-Gating)
const shouldLoadStill = alwaysLoadStill || inView || everInView || (wantsHover && hovered) const shouldLoadStill = alwaysLoadStill || inView || everInView || (wantsHover && hovered)
@ -686,10 +705,7 @@ export default function FinishedVideoPreview({
const showProgressBar = const showProgressBar =
Boolean(progressVideoRef) && Boolean(progressVideoRef) &&
inView && inView
typeof progressTotalSeconds === 'number' &&
Number.isFinite(progressTotalSeconds) &&
progressTotalSeconds > 0
const progressKind: ProgressKind = const progressKind: ProgressKind =
showingInlineVideo ? 'inline' : teaserActive && animatedMode === 'teaser' ? 'teaser' : 'clips' showingInlineVideo ? 'inline' : teaserActive && animatedMode === 'teaser' ? 'teaser' : 'clips'
@ -705,9 +721,28 @@ export default function FinishedVideoPreview({
const frameRatio = showFrameProgress ? clamp01(thumbTimeSec! / effectiveDurationSec!) : 0 const frameRatio = showFrameProgress ? clamp01(thumbTimeSec! / effectiveDurationSec!) : 0
// finaler Balken: Video-Progress hat Priorität, sonst Frames-Progress const hasScrubProgress =
const progressRatio = showProgressBar ? playRatio : showFrameProgress ? frameRatio : 0 !showingInlineVideo &&
const showAnyProgress = showProgressBar || showFrameProgress preferScrubProgress &&
typeof scrubProgressRatio === 'number' &&
Number.isFinite(scrubProgressRatio)
// finaler Balken:
// 1) externer Scrub-Progress (wenn aktiv)
// 2) Video-Progress
// 3) Frames-Progress
const progressRatio = hasScrubProgress
? clamp01(scrubProgressRatio!)
: showProgressBar
? playRatio
: showFrameProgress
? frameRatio
: 0
const hasBasePreviewProgress = showProgressBar || showFrameProgress
const showAnyProgress =
!showingInlineVideo && (hasScrubProgress || hasBasePreviewProgress)
const clipOverlay = useMemo(() => { const clipOverlay = useMemo(() => {
if (!hasDuration) return null if (!hasDuration) return null
@ -818,7 +853,7 @@ export default function FinishedVideoPreview({
sync() sync()
// ✅ Sekundentakt (robust, unabhängig von raf/play-events) // ✅ Sekundentakt (robust, unabhängig von raf/play-events)
timer = window.setInterval(sync, 1000) timer = window.setInterval(sync, 100)
// optional: bei metadata/timeupdate sofort einmal syncen // optional: bei metadata/timeupdate sofort einmal syncen
const onLoaded = () => sync() const onLoaded = () => sync()
@ -898,10 +933,17 @@ export default function FinishedVideoPreview({
!showingInlineVideo && !showingInlineVideo &&
((onDuration && !hasDuration) || (onResolution && !hasResolution)) ((onDuration && !hasDuration) || (onResolution && !hasResolution))
const showTeaserSegments =
Boolean(clipOverlay) &&
(
progressKind === 'teaser' ||
(!showingInlineVideo && hasScrubProgress && animatedMode === 'teaser')
)
const previewNode = ( const previewNode = (
<div <div
ref={rootRef} ref={rootRef}
className={['group rounded bg-gray-100 dark:bg-white/5 overflow-hidden relative isolate', sizeClass, className ?? ''].join(' ')} className={['group bg-gray-100 dark:bg-white/5 overflow-hidden relative', sizeClass, className ?? ''].join(' ')}
onMouseEnter={wantsHover ? () => setHovered(true) : undefined} onMouseEnter={wantsHover ? () => setHovered(true) : undefined}
onMouseLeave={wantsHover ? () => setHovered(false) : undefined} onMouseLeave={wantsHover ? () => setHovered(false) : undefined}
onFocus={wantsHover ? () => setHovered(true) : undefined} onFocus={wantsHover ? () => setHovered(true) : undefined}
@ -1025,9 +1067,9 @@ export default function FinishedVideoPreview({
<div <div
aria-hidden="true" aria-hidden="true"
className={[ className={[
'absolute left-0 right-0 bottom-0 z-[2] pointer-events-none', 'absolute left-0 right-0 bottom-0 z-40 pointer-events-none',
// etwas höher + bei hover deutlich // etwas höher + bei hover deutlich
'h-0.5 group-hover:h-1.5', 'h-0.5 group-hover:h-1',
'transition-[height] duration-150 ease-out', 'transition-[height] duration-150 ease-out',
// Track: heller + border/inset für Kontrast // Track: heller + border/inset für Kontrast
'rounded-none group-hover:rounded-full', 'rounded-none group-hover:rounded-full',
@ -1037,9 +1079,9 @@ export default function FinishedVideoPreview({
].join(' ')} ].join(' ')}
> >
{/* 1) Segmente (previewClips) als Markierungen */} {/* 1) Segmente (previewClips) als Markierungen */}
{progressKind === 'teaser' && clipOverlay ? ( {showTeaserSegments ? (
<div className="absolute inset-0"> <div className="absolute inset-0">
{clipOverlay.map((c, i) => ( {clipOverlay!.map((c, i) => (
<div <div
key={`seg-${i}-${c.left.toFixed(6)}-${c.width.toFixed(6)}`} key={`seg-${i}-${c.left.toFixed(6)}-${c.width.toFixed(6)}`}
className="absolute top-0 bottom-0 bg-white/15 dark:bg-white/20" className="absolute top-0 bottom-0 bg-white/15 dark:bg-white/20"
@ -1054,22 +1096,15 @@ export default function FinishedVideoPreview({
{/* 2) Kontinuierlicher Fortschritt (SOLID, kein Gradient) */} {/* 2) Kontinuierlicher Fortschritt (SOLID, kein Gradient) */}
<div <div
className="absolute inset-0 origin-left transition-transform duration-150 ease-out" className={[
'absolute inset-0 origin-left',
progressKind === 'teaser' ? '' : 'transition-transform duration-150 ease-out',
].join(' ')}
style={{ style={{
transform: `scaleX(${clamp01(progressRatio)})`, transform: `scaleX(${clamp01(progressRatio)})`,
background: 'rgba(99,102,241,0.95)', // indigo-500-ish, kräftig background: 'rgba(99,102,241,0.95)', // indigo-500-ish, kräftig
}} }}
/> />
{/* 3) Knob am Ende (macht Progress sofort klar) */}
<div
className="absolute top-1/2 -translate-y-1/2 opacity-0 group-hover:opacity-100 group-focus-within:opacity-100 transition-opacity duration-150"
style={{
left: `calc(${clamp01(progressRatio) * 100}% - 4px)`,
}}
>
<div className="h-1.5 w-1.5 rounded-full bg-white/90 shadow-[0_0_0_2px_rgba(0,0,0,0.25),0_0_10px_rgba(168,85,247,0.55)]" />
</div>
</div> </div>
) : null} ) : null}

View File

@ -307,7 +307,7 @@ type BioResp = {
// ------ props ------ // ------ props ------
// ------ API types (local model store) ------ // ------ API types (local model store) ------
// /api/models/list liefert StoredModel aus dem models_store // /api/models liefert StoredModel aus dem models_store
type StoredModel = { type StoredModel = {
id: string id: string
modelKey: string modelKey: string
@ -434,7 +434,7 @@ export default function ModelDetails({
const refetchModels = React.useCallback(async () => { const refetchModels = React.useCallback(async () => {
try { try {
const r = await fetch('/api/models/list', { cache: 'no-store' }) const r = await fetch('/api/models', { cache: 'no-store' })
const data = (await r.json().catch(() => null)) as any const data = (await r.json().catch(() => null)) as any
setModels(Array.isArray(data) ? data : []) setModels(Array.isArray(data) ? data : [])
} catch { } catch {
@ -501,7 +501,7 @@ export default function ModelDetails({
if (!open) return if (!open) return
let alive = true let alive = true
setModelsLoading(true) setModelsLoading(true)
fetch('/api/models/list', { cache: 'no-store' }) fetch('/api/models', { cache: 'no-store' })
.then((r) => r.json()) .then((r) => r.json())
.then((data: StoredModel[]) => { .then((data: StoredModel[]) => {
if (!alive) return if (!alive) return

File diff suppressed because it is too large Load Diff

View File

@ -263,6 +263,7 @@ export type PlayerProps = {
onStopJob?: (id: string) => void | Promise<void> onStopJob?: (id: string) => void | Promise<void>
startMuted?: boolean startMuted?: boolean
startAtSec?: number
} }
export default function Player({ export default function Player({
@ -284,6 +285,7 @@ export default function Player({
onToggleWatch, onToggleWatch,
onStopJob, onStopJob,
startMuted = DEFAULT_PLAYER_START_MUTED, startMuted = DEFAULT_PLAYER_START_MUTED,
startAtSec = 0
}: PlayerProps) { }: PlayerProps) {
const title = React.useMemo( const title = React.useMemo(
() => baseName(job.output?.trim() || '') || job.id, () => baseName(job.output?.trim() || '') || job.id,
@ -588,6 +590,14 @@ export default function Player({
return baseName(job.output?.trim() || '') || job.id return baseName(job.output?.trim() || '') || job.id
}, [job.output, job.id]) }, [job.output, job.id])
const normalizedStartAtSec = React.useMemo(() => {
const n = Number(startAtSec)
return Number.isFinite(n) && n >= 0 ? n : 0
}, [startAtSec])
// Merkt sich, für welchen "Open-Zustand" wir den initialen Seek schon angewendet haben
const appliedStartSeekRef = React.useRef<string>('')
React.useEffect(() => { React.useEffect(() => {
if (isRunning) { if (isRunning) {
setMetaReady(true) setMetaReady(true)
@ -895,6 +905,25 @@ export default function Player({
} catch {} } catch {}
}, []) }, [])
const seekPlayerToAbsolute = React.useCallback((absSec: number) => {
const p: any = playerRef.current
if (!p || p.isDisposed?.()) return
const target = Math.max(0, Number(absSec) || 0)
try {
// Shim ist installiert -> p.currentTime(...) interpretiert absolute Zeit korrekt
const dur = Number(p.duration?.() ?? 0)
const maxSeek = Number.isFinite(dur) && dur > 0 ? Math.max(0, dur - 0.05) : target
p.currentTime(Math.min(target, maxSeek))
p.trigger?.('timeupdate')
} catch {
try {
p.currentTime(target)
} catch {}
}
}, [])
React.useEffect(() => { React.useEffect(() => {
if (!mounted) return if (!mounted) return
if (!isRunning && !metaReady) { if (!isRunning && !metaReady) {
@ -925,8 +954,12 @@ export default function Player({
// ✅ NICHT neu setzen, wenn Source identisch ist (verhindert "cancelled" durch unnötige Reloads) // ✅ NICHT neu setzen, wenn Source identisch ist (verhindert "cancelled" durch unnötige Reloads)
const curSrc = String((p as any).currentSrc?.() || '') const curSrc = String((p as any).currentSrc?.() || '')
// ✅ immer zurücksetzen, sobald der Effekt für diese media.src läuft
// (auch wenn wir die gleiche Source behalten)
appliedStartSeekRef.current = ''
if (curSrc && curSrc === media.src) { if (curSrc && curSrc === media.src) {
// trotzdem versuchen zu spielen (z.B. wenn nur muted/state geändert wurde)
const ret = p.play?.() const ret = p.play?.()
if (ret && typeof (ret as any).catch === 'function') (ret as Promise<void>).catch(() => {}) if (ret && typeof (ret as any).catch === 'function') (ret as Promise<void>).catch(() => {})
return return
@ -976,6 +1009,87 @@ export default function Player({
tryPlay() tryPlay()
}, [mounted, isRunning, metaReady, media.src, media.type, startMuted, updateIntrinsicDims, fullDurationSec, releaseMedia]) }, [mounted, isRunning, metaReady, media.src, media.type, startMuted, updateIntrinsicDims, fullDurationSec, releaseMedia])
React.useEffect(() => {
if (!mounted) return
if (isRunning) return // Live spielt nicht über Video.js
if (!metaReady) return
if (!media.src) return
const p: any = playerRef.current
if (!p || p.isDisposed?.()) return
// Nur seeken, wenn wirklich eine Startzeit angefordert wurde
if (!(normalizedStartAtSec > 0)) {
appliedStartSeekRef.current = ''
return
}
const seekSig = `${playbackKey}|${media.src}|${normalizedStartAtSec.toFixed(3)}`
if (appliedStartSeekRef.current === seekSig) return
let cancelled = false
const apply = () => {
if (cancelled) return
const pp: any = playerRef.current
if (!pp || pp.isDisposed?.()) return
// ✅ nur seeken, wenn die AKTUELLE source wirklich geladen ist
const currentSrc = String(pp.currentSrc?.() || '')
if (!currentSrc || currentSrc !== media.src) return
// readyState >= 1 => metadata verfügbar
const techEl =
pp.tech?.(true)?.el?.() ||
pp.el?.()?.querySelector?.('video.vjs-tech')
const readyState =
techEl instanceof HTMLVideoElement ? Number(techEl.readyState || 0) : 0
if (readyState < 1) return
seekPlayerToAbsolute(normalizedStartAtSec)
appliedStartSeekRef.current = seekSig
try {
const ret = pp.play?.()
if (ret && typeof ret.catch === 'function') ret.catch(() => {})
} catch {}
}
// ✅ Erst versuchen (falls schon geladen)
apply()
if (appliedStartSeekRef.current === seekSig) return
// ✅ Dann auf Events warten (neue Source lädt noch)
const onLoaded = () => apply()
p.one?.('loadedmetadata', onLoaded)
p.one?.('canplay', onLoaded)
p.one?.('durationchange', onLoaded)
// Extra fallback (manche Browser/Event-Reihenfolgen zickig)
const t1 = window.setTimeout(apply, 0)
const t2 = window.setTimeout(apply, 120)
return () => {
cancelled = true
window.clearTimeout(t1)
window.clearTimeout(t2)
try { p.off?.('loadedmetadata', onLoaded) } catch {}
try { p.off?.('canplay', onLoaded) } catch {}
try { p.off?.('durationchange', onLoaded) } catch {}
}
}, [
mounted,
isRunning,
metaReady,
media.src,
playbackKey,
normalizedStartAtSec,
seekPlayerToAbsolute,
])
React.useEffect(() => { React.useEffect(() => {
if (!mounted) return if (!mounted) return
const p = playerRef.current const p = playerRef.current

View File

@ -8,7 +8,7 @@ type Props = {
imageCount: number imageCount: number
activeIndex?: number activeIndex?: number
onActiveIndexChange: (index: number | undefined) => void onActiveIndexChange: (index: number | undefined) => void
onClickIndex?: (index: number) => void onIndexClick?: (index: number) => void
className?: string className?: string
stepSeconds?: number stepSeconds?: number
} }
@ -28,7 +28,7 @@ export default function PreviewScrubber({
imageCount, imageCount,
activeIndex, activeIndex,
onActiveIndexChange, onActiveIndexChange,
onClickIndex, onIndexClick,
className, className,
stepSeconds = 0, stepSeconds = 0,
}: Props) { }: Props) {
@ -110,11 +110,11 @@ export default function PreviewScrubber({
const handleClick = React.useCallback( const handleClick = React.useCallback(
(e: React.MouseEvent<HTMLDivElement>) => { (e: React.MouseEvent<HTMLDivElement>) => {
e.stopPropagation() e.stopPropagation()
if (!onClickIndex) return if (!onIndexClick) return
const idx = indexFromClientX(e.clientX) const idx = indexFromClientX(e.clientX)
if (typeof idx === 'number') onClickIndex(idx) if (typeof idx === 'number') onIndexClick(idx)
}, },
[indexFromClientX, onClickIndex] [indexFromClientX, onIndexClick]
) )
if (!imageCount || imageCount < 1) return null if (!imageCount || imageCount < 1) return null
@ -168,7 +168,7 @@ export default function PreviewScrubber({
aria-valuemax={imageCount} aria-valuemax={imageCount}
aria-valuenow={typeof activeIndex === 'number' ? activeIndex + 1 : undefined} aria-valuenow={typeof activeIndex === 'number' ? activeIndex + 1 : undefined}
> >
<div className="pointer-events-none absolute inset-x-1 bottom-[3px] h-3 rounded-sm bg-white/35 ring-1 ring-white/40 backdrop-blur-[1px]"> <div className="pointer-events-none absolute inset-x-0 bottom-0 h-4 bg-white/35 ring-1 ring-white/40 backdrop-blur-[1px]">
{typeof markerLeftPct === 'number' ? ( {typeof markerLeftPct === 'number' ? (
<div <div
className="absolute inset-y-0 w-[2px] bg-white shadow-[0_0_0_1px_rgba(0,0,0,0.35)]" className="absolute inset-y-0 w-[2px] bg-white shadow-[0_0_0_1px_rgba(0,0,0,0.35)]"
@ -182,7 +182,7 @@ export default function PreviewScrubber({
<div <div
className={[ className={[
'pointer-events-none absolute bottom-[17px] z-10', 'pointer-events-none absolute bottom-[19px] z-10',
'rounded bg-black/70 px-1.5 py-0.5', 'rounded bg-black/70 px-1.5 py-0.5',
'text-[11px] leading-none text-white whitespace-nowrap', 'text-[11px] leading-none text-white whitespace-nowrap',
'transition-opacity duration-100', 'transition-opacity duration-100',