updated to sse

This commit is contained in:
Linrador 2026-03-04 18:44:22 +01:00
parent d578d4e6aa
commit 81f02c9941
18 changed files with 715 additions and 1052 deletions

Binary file not shown.

View File

@ -483,12 +483,18 @@ func applyPreviewSpriteTruthToRecordJobMeta(j *RecordJob) {
// ---------------- Handlers ----------------
func recordList(w http.ResponseWriter, r *http.Request) {
func recordJobs(w http.ResponseWriter, r *http.Request) {
if !mustMethod(w, r, http.MethodGet) {
return
}
t0 := time.Now()
jobsMu.Lock()
wait := time.Since(t0)
if wait > 200*time.Millisecond {
fmt.Println("[recordJobs] waited for jobsMu:", wait)
}
list := make([]*RecordJob, 0, len(jobs))
for _, j := range jobs {
if j == nil || j.Hidden {
@ -496,7 +502,7 @@ func recordList(w http.ResponseWriter, r *http.Request) {
}
list = append(list, j)
}
jobsMu.Unlock()
jobsMu.Unlock() // ✅ früh unlocken
sort.Slice(list, func(i, j int) bool {
return list[i].StartedAt.After(list[j].StartedAt)

View File

@ -30,8 +30,7 @@ func registerRoutes(mux *http.ServeMux, auth *AuthManager) *ModelStore {
api.HandleFunc("/api/cookies", cookiesHandler)
api.HandleFunc("/api/record/done/stream", handleDoneStream)
api.HandleFunc("/api/perf/stream", perfStreamHandler)
api.HandleFunc("/api/stream", appStream)
api.HandleFunc("/api/status/disk", diskStatusHandler)
api.HandleFunc("/api/autostart/state", autostartStateHandler)
@ -50,8 +49,7 @@ func registerRoutes(mux *http.ServeMux, auth *AuthManager) *ModelStore {
api.HandleFunc("/api/preview/live", recordPreviewLive)
api.HandleFunc("/api/preview-scrubber/", recordPreviewScrubberFrame)
api.HandleFunc("/api/preview-sprite/", recordPreviewSprite)
api.HandleFunc("/api/record/list", recordList)
api.HandleFunc("/api/record/stream", recordStream)
api.HandleFunc("/api/record/jobs", recordJobs)
api.HandleFunc("/api/record/done/meta", recordDoneMeta)
api.HandleFunc("/api/record/video", recordVideo)
api.HandleFunc("/api/record/done", recordDoneList)
@ -71,8 +69,6 @@ func registerRoutes(mux *http.ServeMux, auth *AuthManager) *ModelStore {
// Tasks
api.HandleFunc("/api/tasks/generate-assets", tasksGenerateAssets)
api.HandleFunc("/api/tasks/assets/stream", assetsStream)
// --------------------------
// 3) ModelStore (Postgres)
// DSN kommt aus Settings: databaseUrl + gespeichertes Passwort

View File

@ -63,6 +63,9 @@ var (
// assets task stream
assetsHub = newSSEHub()
assetsNotify = make(chan struct{}, 1)
// perf stream (periodic snapshot)
perfHub = newSSEHub()
)
func notifyDoneChanged() {
@ -86,7 +89,7 @@ func notifyAssetsChanged() {
}
}
// initSSE startet die Debounce-Broadcaster.
// initSSE startet die Debounce-/Ticker-Broadcaster.
// Wichtig: wird aus main.go init() aufgerufen.
func initSSE() {
// Debounced broadcaster (jobs)
@ -123,7 +126,7 @@ func initSSE() {
}
}()
// Debounced broadcaster (assets task)
// Debounced broadcaster (assets task)
go func() {
for range assetsNotify {
time.Sleep(80 * time.Millisecond)
@ -141,9 +144,22 @@ func initSSE() {
}
}
}()
// Periodic broadcaster (perf)
go func() {
t := time.NewTicker(3 * time.Second)
defer t.Stop()
for range t.C {
b := perfSnapshotJSON()
if len(b) > 0 {
perfHub.broadcast(b)
}
}
}()
}
// -------------------- SSE: /api/record/stream --------------------
// -------------------- Snapshots --------------------
// jobsSnapshotJSON liefert die aktuelle (gefilterte) Job-Liste als JSON.
// Greift auf jobs/jobsMu aus main.go zu (gleiches Package).
@ -170,7 +186,43 @@ func jobsSnapshotJSON() []byte {
return b
}
func recordStream(w http.ResponseWriter, r *http.Request) {
// assetsSnapshotJSON returns the current assets-task state serialized as JSON.
// It copies the shared state under assetsTaskMu and releases the lock before
// marshaling, so the (comparatively slow) JSON encoding never runs while the
// mutex is held.
//
// NOTE(review): the Marshal error is deliberately ignored; AssetsTaskState is
// presumably a plain struct that always marshals — confirm it contains no
// channels/funcs before relying on that.
func assetsSnapshotJSON() []byte {
	assetsTaskMu.Lock()
	st := assetsTaskState
	assetsTaskMu.Unlock()
	b, _ := json.Marshal(st)
	return b
}
// perfSnapshotJSON builds the JSON payload for the frontend PerformanceMonitor
// (the "perf" SSE event on /api/stream).
//
// IMPORTANT: this is still the stub version. The real CPU%, disk free/total/
// used% values from the old perfStreamHandler must be factored into a shared
// helper and filled in here; until then every metric except serverMs is null.
// serverMs lets the frontend compute latency as: ping = Date.now() - serverMs.
func perfSnapshotJSON() []byte {
	snapshot := map[string]any{
		"serverMs":        time.Now().UnixMilli(),
		"cpuPercent":      nil,
		"diskFreeBytes":   nil,
		"diskTotalBytes":  nil,
		"diskUsedPercent": nil,
	}
	out, err := json.Marshal(snapshot)
	if err != nil {
		// Cannot happen for this fixed map shape; keep the nil-on-error
		// contract of the original implementation anyway.
		return nil
	}
	return out
}
// -------------------- SSE: /api/stream (UNIFIED) --------------------
//
// Ein Stream für:
// - event: jobs -> []RecordJob
// - event: doneChanged-> {"type":"doneChanged","seq":...,"ts":...}
// - event: state -> assetsTaskState
// - event: perf -> PerfSnapshot
//
// Frontend soll nur noch /api/stream öffnen (sseSingleton deduped per URL).
func appStream(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed)
return
@ -187,13 +239,12 @@ func recordStream(w http.ResponseWriter, r *http.Request) {
h.Set("Content-Type", "text/event-stream; charset=utf-8")
h.Set("Cache-Control", "no-cache, no-transform")
h.Set("Connection", "keep-alive")
h.Set("X-Accel-Buffering", "no") // hilfreich bei Reverse-Proxies
h.Set("X-Accel-Buffering", "no")
// sofort starten
w.WriteHeader(http.StatusOK)
writeEvent := func(event string, data []byte) bool {
// returns false => client weg / write error
if event != "" {
if _, err := fmt.Fprintf(w, "event: %s\n", event); err != nil {
return false
@ -204,7 +255,6 @@ func recordStream(w http.ResponseWriter, r *http.Request) {
return false
}
} else {
// empty payload ok (nur terminator)
if _, err := io.WriteString(w, "\n"); err != nil {
return false
}
@ -227,134 +277,69 @@ func recordStream(w http.ResponseWriter, r *http.Request) {
}
flusher.Flush()
// Channel + Hub
ch := make(chan []byte, 32)
recordJobsHub.add(ch)
defer recordJobsHub.remove(ch)
// pro Client: je Hub ein Channel
jobsCh := make(chan []byte, 32)
doneCh := make(chan []byte, 32)
assetsCh := make(chan []byte, 32)
perfCh := make(chan []byte, 32)
// Initialer Snapshot sofort
recordJobsHub.add(jobsCh)
defer recordJobsHub.remove(jobsCh)
doneHub.add(doneCh)
defer doneHub.remove(doneCh)
assetsHub.add(assetsCh)
defer assetsHub.remove(assetsCh)
perfHub.add(perfCh)
defer perfHub.remove(perfCh)
// Initial Snapshots
if b := jobsSnapshotJSON(); len(b) > 0 {
if !writeEvent("jobs", b) {
return
}
}
ctx := r.Context()
// Ping/Keepalive
ping := time.NewTicker(15 * time.Second)
defer ping.Stop()
for {
select {
case <-ctx.Done():
return
case b, ok := <-ch:
if !ok {
return
}
if len(b) == 0 {
continue
}
// Burst-Coalescing: wenn viele Updates schnell kommen, nur das neueste senden
last := b
drain:
for i := 0; i < 64; i++ {
select {
case nb, ok := <-ch:
if !ok {
return
}
if len(nb) > 0 {
last = nb
}
default:
break drain
}
}
if !writeEvent("jobs", last) {
// done: initialer "kick" (hilft, UI sofort zu syncen)
seq := atomic.LoadUint64(&doneSeq)
initDone := []byte(fmt.Sprintf(`{"type":"doneChanged","seq":%d,"ts":%d}`, seq, time.Now().UnixMilli()))
if !writeEvent("doneChanged", initDone) {
return
}
case <-ping.C:
// Keepalive als Kommentar (stört nicht, hält Verbindungen offen)
if !writeComment(fmt.Sprintf("ping %d", time.Now().Unix())) {
return
}
}
}
}
// -------------------- SSE: /api/tasks/assets/stream --------------------
func assetsSnapshotJSON() []byte {
assetsTaskMu.Lock()
st := assetsTaskState
assetsTaskMu.Unlock()
b, _ := json.Marshal(st)
return b
}
func assetsStream(w http.ResponseWriter, r *http.Request) {
if r.Method != http.MethodGet {
http.Error(w, "Nur GET erlaubt", http.StatusMethodNotAllowed)
return
}
flusher, ok := w.(http.Flusher)
if !ok {
http.Error(w, "Streaming nicht unterstützt", http.StatusInternalServerError)
return
}
h := w.Header()
h.Set("Content-Type", "text/event-stream; charset=utf-8")
h.Set("Cache-Control", "no-cache, no-transform")
h.Set("Connection", "keep-alive")
h.Set("X-Accel-Buffering", "no")
w.WriteHeader(http.StatusOK)
// Reconnect-Hinweis
fmt.Fprintf(w, "retry: 3000\n\n")
flusher.Flush()
writeEvent := func(event string, data []byte) bool {
if event != "" {
if _, err := fmt.Fprintf(w, "event: %s\n", event); err != nil {
return false
}
}
if _, err := fmt.Fprintf(w, "data: %s\n\n", data); err != nil {
return false
}
flusher.Flush()
return true
}
writeComment := func(msg string) bool {
if _, err := fmt.Fprintf(w, ": %s\n\n", msg); err != nil {
return false
}
flusher.Flush()
return true
}
ch := make(chan []byte, 32)
assetsHub.add(ch)
defer assetsHub.remove(ch)
// Initial Snapshot
if b := assetsSnapshotJSON(); len(b) > 0 {
if !writeEvent("state", b) {
return
}
}
if b := perfSnapshotJSON(); len(b) > 0 {
if !writeEvent("perf", b) {
return
}
}
// coalesce helper: wenn Burst, nur latest senden
drainLatest := func(first []byte, ch <-chan []byte) []byte {
last := first
for i := 0; i < 64; i++ {
select {
case nb, ok := <-ch:
if !ok {
return last
}
if len(nb) > 0 {
last = nb
}
default:
return last
}
}
return last
}
ctx := r.Context()
ping := time.NewTicker(15 * time.Second)
defer ping.Stop()
@ -364,34 +349,54 @@ func assetsStream(w http.ResponseWriter, r *http.Request) {
case <-ctx.Done():
return
case b, ok := <-ch:
case b, ok := <-jobsCh:
if !ok {
return
}
if len(b) == 0 {
continue
}
// coalesce
last := b
drain:
for i := 0; i < 64; i++ {
select {
case nb, ok := <-ch:
last := drainLatest(b, jobsCh)
if !writeEvent("jobs", last) {
return
}
case b, ok := <-doneCh:
if !ok {
return
}
if len(nb) > 0 {
last = nb
}
default:
break drain
if len(b) == 0 {
continue
}
last := drainLatest(b, doneCh)
if !writeEvent("doneChanged", last) {
return
}
case b, ok := <-assetsCh:
if !ok {
return
}
if len(b) == 0 {
continue
}
last := drainLatest(b, assetsCh)
if !writeEvent("state", last) {
return
}
case b, ok := <-perfCh:
if !ok {
return
}
if len(b) == 0 {
continue
}
last := drainLatest(b, perfCh)
if !writeEvent("perf", last) {
return
}
case <-ping.C:
if !writeComment(fmt.Sprintf("ping %d", time.Now().Unix())) {
return

View File

@ -72,10 +72,14 @@ func tasksGenerateAssets(w http.ResponseWriter, r *http.Request) {
// cancelbarer Context (pro Run)
ctx, cancel := context.WithCancel(context.Background())
assetsTaskMu.Lock()
assetsTaskCancel = cancel
assetsTaskMu.Unlock()
now := time.Now()
assetsTaskState = AssetsTaskState{
st := updateAssetsState(func(st *AssetsTaskState) {
*st = AssetsTaskState{
Running: true,
Total: 0,
Done: 0,
@ -87,14 +91,9 @@ func tasksGenerateAssets(w http.ResponseWriter, r *http.Request) {
Error: "",
CurrentFile: "",
}
st := assetsTaskState
assetsTaskMu.Unlock()
// ✅ SSE: Start pushen
notifyAssetsChanged()
})
go runGenerateMissingAssets(ctx)
writeJSON(w, http.StatusOK, st)
return

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -5,8 +5,8 @@
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1, viewport-fit=cover" />
<title>App</title>
<script type="module" crossorigin src="/assets/index-C4whm-WW.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-3IFBscEU.css">
<script type="module" crossorigin src="/assets/index-BC3HxqFv.js"></script>
<link rel="stylesheet" crossorigin href="/assets/index-e_Qq8t1c.css">
</head>
<body>
<div id="root"></div>

View File

@ -19,6 +19,7 @@ import { useNotify } from './components/ui/notify'
//import { startChaturbateOnlinePolling } from './lib/chaturbateOnlinePoller'
import CategoriesTab from './components/ui/CategoriesTab'
import LoginPage from './components/ui/LoginPage'
import { subscribeSSE } from './lib/sseSingleton'
const COOKIE_STORAGE_KEY = 'record_cookies'
@ -107,12 +108,6 @@ type ChaturbateOnlineRoom = {
image_url?: string
}
type ChaturbateOnlineResponse = {
enabled: boolean
rooms: ChaturbateOnlineRoom[]
total?: number
}
function normalizeHttpUrl(raw: string): string | null {
let v = (raw ?? '').trim()
if (!v) return null
@ -540,28 +535,6 @@ export default function App() {
}
}, [])
const loadJobs = useCallback(async () => {
try {
const res = await fetch('/api/record/list', { cache: 'no-store' as any })
if (!res.ok) return
const data = await res.json().catch(() => null)
// akzeptiere: Array oder { items: [] }
const items = Array.isArray(data)
? (data as RecordJob[])
: Array.isArray(data?.items)
? (data.items as RecordJob[])
: []
setJobs(items)
jobsRef.current = items
setLastHeaderUpdateAtMs(Date.now())
} catch {
// ignore
}
}, [])
useEffect(() => {
try {
window.localStorage.setItem(DONE_SORT_KEY, doneSort)
@ -779,6 +752,9 @@ export default function App() {
const cookiesRef = useRef<Record<string, string>>({})
const jobsRef = useRef<RecordJob[]>([])
const lastDoneSseAtRef = useRef(0)
const lastJobsSseAtRef = useRef(0)
// ✅ "Job gestartet" Toast: dedupe (auch gegen SSE/polling) + initial-load suppression
const startedToastByJobIdRef = useRef<Record<string, true>>({})
const jobsInitDoneRef = useRef(false)
@ -943,17 +919,6 @@ export default function App() {
const [cbOnlineByKeyLower, setCbOnlineByKeyLower] = useState<Record<string, ChaturbateOnlineRoom>>({})
const cbOnlineByKeyLowerRef = useRef<Record<string, ChaturbateOnlineRoom>>({})
const lastCbShowByKeyLowerRef = useRef<Record<string, string>>({})
// ✅ merkt sich, ob ein Model im letzten Snapshot überhaupt online war
const lastCbOnlineByKeyLowerRef = useRef<Record<string, true>>({})
// ✅ verhindert Toast-Spam direkt beim ersten Poll (Startup)
const cbOnlineInitDoneRef = useRef(false)
// ✅ merkt sich, ob ein Model seit App-Start schon einmal online war
const everCbOnlineByKeyLowerRef = useRef<Record<string, true>>({})
useEffect(() => {
cbOnlineByKeyLowerRef.current = cbOnlineByKeyLower
}, [cbOnlineByKeyLower])
@ -1313,12 +1278,27 @@ export default function App() {
if (donePage > maxPage) setDonePage(maxPage)
}, [doneCount, donePage])
// jobs SSE / polling (mit "Job gestartet" Toast für Backend-Autostarts)
useEffect(() => {
if (!authed) return
let es: EventSource | null = null
const unsub = subscribeSSE<RecordJob[]>('/api/stream', 'jobs', (data) => {
lastJobsSseAtRef.current = Date.now() // ✅ hinzufügen
const items = Array.isArray(data) ? data : []
setJobs(items)
jobsRef.current = items
setLastHeaderUpdateAtMs(Date.now())
})
return () => unsub()
}, [authed])
// ✅ doneChanged über unified SSE (/api/stream) + Poll-Fallback bei Inaktivität
useEffect(() => {
if (!authed) return
let timer: number | null = null
const lastFireRef = { t: 0 }
let coalesceTimer: number | null = null
const stopPoll = () => {
if (timer != null) {
@ -1331,19 +1311,21 @@ export default function App() {
if (timer != null) return
timer = window.setInterval(() => {
if (document.hidden) return
// ✅ Wenn SSE "lebt", Poll sparen
const age = Date.now() - (lastDoneSseAtRef.current || 0)
if (age < 12_000) return
if (selectedTabRef.current === 'finished') {
void loadDoneCount()
requestFinishedReload('done-stream poll tick')
requestFinishedReload('done-stream poll fallback tick')
} else {
void loadDoneCount()
}
}, document.hidden ? 60000 : 15000)
}
const lastFireRef = { t: 0 }
let coalesceTimer: number | null = null
const requestRefresh = () => {
const requestRefresh = (reason: string) => {
const now = Date.now()
const since = now - lastFireRef.t
@ -1355,7 +1337,7 @@ export default function App() {
lastFireRef.t = Date.now()
if (selectedTabRef.current === 'finished') {
void loadDoneCount()
requestFinishedReload('done-stream coalesced requestRefresh')
requestFinishedReload(`done-stream coalesced (${reason})`)
} else {
void loadDoneCount()
}
@ -1366,7 +1348,7 @@ export default function App() {
lastFireRef.t = now
if (selectedTabRef.current === 'finished') {
void loadDoneCount()
requestFinishedReload('done-stream requestRefresh')
requestFinishedReload(`done-stream (${reason})`)
} else {
void loadDoneCount()
}
@ -1375,23 +1357,17 @@ export default function App() {
// initial
void loadDoneCount()
es = new EventSource('/api/record/done/stream')
es.onopen = () => {
// ✅ sobald SSE stabil da ist: Poll aus
stopPoll()
}
es.onerror = () => {
// ✅ SSE kaputt -> Poll an
// ✅ Poll als Safety-Net an (wird automatisch "stumm", wenn SSE Events kommen)
startPoll()
}
const onDone = () => requestRefresh()
es.addEventListener('doneChanged', onDone as any)
// ✅ Unified SSE
const unsub = subscribeSSE<any>('/api/stream', 'doneChanged', (_data) => {
lastDoneSseAtRef.current = Date.now()
requestRefresh('sse event')
})
const onVis = () => {
if (!document.hidden) requestRefresh()
if (!document.hidden) requestRefresh('visibilitychange')
}
document.addEventListener('visibilitychange', onVis)
@ -1399,44 +1375,10 @@ export default function App() {
document.removeEventListener('visibilitychange', onVis)
if (coalesceTimer != null) window.clearTimeout(coalesceTimer)
stopPoll()
es?.removeEventListener('doneChanged', onDone as any)
es?.close()
es = null
unsub()
}
}, [authed, loadDoneCount, requestFinishedReload])
useEffect(() => {
if (!authed) return
// initial
void loadJobs()
// polling: schneller wenn running-tab offen oder jobs laufen
const t = window.setInterval(() => {
if (document.hidden) return
const hasRunning = jobsRef.current.some((j) => {
const s = String((j as any)?.status ?? '').toLowerCase()
return s === 'running' || s === 'postwork'
})
// wenn Tab "running" offen ODER irgendwas läuft -> häufiger pollen
if (selectedTabRef.current === 'running' || hasRunning) {
void loadJobs()
}
}, document.hidden ? 60000 : 3000) // 3s fühlt sich "live" an
const onVis = () => {
if (!document.hidden) void loadJobs()
}
document.addEventListener('visibilitychange', onVis)
return () => {
window.clearInterval(t)
document.removeEventListener('visibilitychange', onVis)
}
}, [authed, loadJobs])
function isChaturbate(raw: string): boolean {
const norm = normalizeHttpUrl(raw)
if (!norm) return false

View File

@ -11,7 +11,6 @@ import ProgressBar from './ProgressBar'
import RecordJobActions from './RecordJobActions'
import { PauseIcon, PlayIcon } from '@heroicons/react/24/solid'
import { subscribeSSE } from '../../lib/sseSingleton'
import { useRecordJobsSSE } from '../../lib/useRecordJobsSSE'
import { useMediaQuery } from '../../lib/useMediaQuery'
type PendingWatchedRoom = WaitingModelRow & {
@ -736,7 +735,7 @@ export default function Downloads({
blurPreviews
}: Props) {
const jobsLive = useRecordJobsSSE(jobs)
const jobsLive = jobs
const isDesktop = useMediaQuery('(min-width: 640px)', true)
@ -778,7 +777,7 @@ export default function Downloads({
// danach: Stream (Singleton)
const unsub = subscribeSSE<AutostartState>(
'/api/autostart/state/stream',
'/api/stream',
'autostart',
(data) => {
const nextPaused = Boolean((data as any)?.paused)

View File

@ -23,7 +23,6 @@ import {
PhotoIcon,
SparklesIcon,
UsersIcon,
FilmIcon,
ClockIcon,
EyeIcon as EyeOutlineIcon,
} from '@heroicons/react/24/outline'
@ -32,7 +31,6 @@ import {
StarIcon as StarSolidIcon,
EyeIcon as EyeSolidIcon,
} from '@heroicons/react/24/solid'
import { useMediaQuery } from '../../lib/useMediaQuery'
import FinishedVideoPreview from './FinishedVideoPreview'
import TagOverflowRow from './TagOverflowRow'
import PreviewScrubber from './PreviewScrubber'
@ -255,16 +253,6 @@ function pill(cls: string) {
const previewBlurCls = (blur?: boolean) =>
blur ? 'blur-md scale-[1.03] brightness-90' : ''
function niceFileLabel(file: string) {
const s = stripHotPrefix(file || '').trim()
return s || '—'
}
function endedLabel(job: RecordJob) {
const ended = (job as any).endedAt ?? (job as any).completedAt ?? job.endedAt
return ended ? shortDate(ended as any) : '—'
}
function firstNonEmptyString(...values: unknown[]): string | undefined {
for (const v of values) {
if (typeof v === 'string') {
@ -536,7 +524,7 @@ export default function ModelDetails({
onStopJob
}: Props) {
const isDesktop = useMediaQuery('(min-width: 640px)')
//const isDesktop = useMediaQuery('(min-width: 640px)')
const [models, setModels] = React.useState<StoredModel[]>([])
const [, setModelsLoading] = React.useState(false)
@ -554,11 +542,13 @@ export default function ModelDetails({
const [running, setRunning] = React.useState<RecordJob[]>([])
const [runningLoading, setRunningLoading] = React.useState(false)
const runningReqSeqRef = React.useRef(0)
const [bioRefreshSeq, setBioRefreshSeq] = React.useState(0)
const [imgViewer, setImgViewer] = React.useState<{ src: string; alt?: string } | null>(null)
const [runningHover, setRunningHover] = React.useState(false)
const [, setRunningHover] = React.useState(false)
const [stopPending, setStopPending] = React.useState(false)
@ -834,26 +824,31 @@ export default function ModelDetails({
if (!open) return
if (Array.isArray(runningJobs)) return
let alive = true
const ac = new AbortController()
const seq = ++runningReqSeqRef.current
setRunningLoading(true)
fetch('/api/record/jobs', { cache: 'no-store' })
fetch('/api/record/jobs', { cache: 'no-store', signal: ac.signal })
.then((r) => r.json())
.then((data: RecordJob[]) => {
if (!alive) return
if (ac.signal.aborted) return
if (runningReqSeqRef.current !== seq) return
setRunning(Array.isArray(data) ? data : [])
})
.catch(() => {
if (!alive) return
if (ac.signal.aborted) return
if (runningReqSeqRef.current !== seq) return
setRunning([])
})
.finally(() => {
if (!alive) return
if (ac.signal.aborted) return
if (runningReqSeqRef.current !== seq) return
setRunningLoading(false)
})
return () => {
alive = false
ac.abort()
}
}, [open, runningJobs])
@ -864,10 +859,6 @@ export default function ModelDetails({
const doneMatches = done
const doneTotalPages = React.useMemo(() => {
return Math.max(1, Math.ceil(doneTotalCount / DONE_PAGE_SIZE))
}, [doneTotalCount])
const runningMatches = React.useMemo(() => {
if (!key) return []
return runningList.filter((j) => {
@ -876,9 +867,6 @@ export default function ModelDetails({
})
}, [runningList, key])
// ✅ Running-Hero: wenn es einen laufenden Job für dieses Model gibt, nimm dessen Preview
const runningHeroJob = runningMatches.length ? runningMatches[0] : null
const allTags = React.useMemo(() => {
const a = splitTags(model?.tags)
const b = Array.isArray(room?.tags) ? room!.tags : []
@ -947,22 +935,6 @@ export default function ModelDetails({
return id ? `${id}::${out}` : out
}, [])
const addToSet = (setState: React.Dispatch<React.SetStateAction<Set<string>>>, k: string) =>
setState((prev) => {
if (prev.has(k)) return prev
const next = new Set(prev)
next.add(k)
return next
})
const delFromSet = (setState: React.Dispatch<React.SetStateAction<Set<string>>>, k: string) =>
setState((prev) => {
if (!prev.has(k)) return prev
const next = new Set(prev)
next.delete(k)
return next
})
const handleToggleHot = React.useCallback(
async (job: RecordJob) => {
const out = job.output || ''
@ -1155,20 +1127,6 @@ export default function ModelDetails({
{ id: 'running', label: 'Running', count: runningMatches.length ? fmtInt(runningMatches.length) : undefined, disabled: runningLoading },
]
// ✅ Adapter: RecordJobActions erwartet void|boolean.
// Dein onToggleHot darf ein Objekt zurückgeben -> wir droppen das.
const onToggleHotAction = React.useCallback(
async (job: RecordJob): Promise<boolean> => {
try {
await onToggleHot?.(job)
return true
} catch {
return false
}
},
[onToggleHot]
)
return (
<Modal
open={open}
@ -2547,11 +2505,6 @@ export default function ModelDetails({
const dur = runtimeOf(j)
const size = formatBytes(sizeBytesOf(j))
// Flags: aktuelles Model
const isFav = Boolean(model?.favorite)
const isLiked = model?.liked === true
const isWatching = Boolean(model?.watching)
const cardTags = allTags
const modelImageSrc = firstNonEmptyString(heroImgFull, heroImg)
@ -2630,8 +2583,6 @@ export default function ModelDetails({
const hasSpriteScrubber = hasScrubberUi && spriteCols > 0 && spriteRows > 0
const scrubberCount = hasScrubberUi ? spriteCount : 0
const scrubberStepSeconds = hasScrubberUi ? spriteStepSeconds : 0
const hasScrubber = hasScrubberUi
const activeScrubIndex = scrubIndexByKey[k]
const scrubProgressRatio =

View File

@ -115,12 +115,15 @@ export default function PerformanceMonitor({
const LOW_FREE_BYTES = 5 * 1024 * 1024 * 1024 // 5 GB
const RESET_BYTES = 8 * 1024 * 1024 * 1024 // Hysterese (8 GB)
const emergencyRef = React.useRef(false)
const lastSetAtRef = React.useRef(0)
React.useEffect(() => {
const url = `/api/perf/stream?ms=${encodeURIComponent(String(pollMs))}`
const unsub = subscribeSSE<any>('/api/stream', 'perf', (data) => {
// optional throttle auf pollMs
const now = Date.now()
if (now - lastSetAtRef.current < pollMs) return
lastSetAtRef.current = now
const unsub = subscribeSSE<any>(url, 'perf', (data) => {
const v = typeof data?.cpuPercent === 'number' ? data.cpuPercent : null
const free = typeof data?.diskFreeBytes === 'number' ? data.diskFreeBytes : null
const total = typeof data?.diskTotalBytes === 'number' ? data.diskTotalBytes : null
@ -138,7 +141,6 @@ export default function PerformanceMonitor({
return () => unsub()
}, [pollMs])
// -------------------------
// Meter config
// -------------------------

View File

@ -10,7 +10,6 @@ import TaskList from './TaskList'
import type { TaskItem } from './TaskList'
import PostgresUrlModal from './PostgresUrlModal'
import { CheckIcon, XMarkIcon } from '@heroicons/react/24/solid'
import { ArrowDownTrayIcon } from '@heroicons/react/24/outline'
type RecorderSettings = {
databaseUrl?: string
@ -547,7 +546,6 @@ export default function RecorderSettings({ onAssetsGenerated }: Props) {
startingLabel="Starte…"
startUrl="/api/tasks/generate-assets"
stopUrl="/api/tasks/generate-assets"
sseUrl="/api/tasks/assets/stream"
onFinished={onAssetsGenerated}
onStart={(ac) => {
assetsAbortRef.current = ac

View File

@ -1,5 +1,4 @@
// frontend\src\components\ui\Task.tsx
'use client'
import { useEffect, useRef, useState } from 'react'
@ -25,7 +24,6 @@ type Props = {
/** API-Endpunkte (optional, wenn onTrigger verwendet wird) */
startUrl?: string
stopUrl?: string
sseUrl?: string
/** Optional: lokaler Trigger statt API/SSE */
onTrigger?: () => Promise<void> | void
@ -72,10 +70,15 @@ async function fetchJSON<T>(url: string, init?: RequestInit): Promise<T> {
return data as T
}
const STREAM_URL = '/api/stream'
// ⚠️ Aktuell ist "state" in deinem unified stream das Assets-Task-State-Event.
// Wenn du später mehrere SSE-Tasks willst, brauchst du entweder unterschiedliche eventNames
// oder ein taskId im Payload + Filter.
const TASK_STATE_EVENT = 'state'
export default function Task({
startUrl,
stopUrl,
sseUrl,
onTrigger,
title = 'Task',
description = 'Startet eine Hintergrundaufgabe. Fortschritt & Abbrechen oben in der Taskliste.',
@ -173,11 +176,14 @@ export default function Task({
}
}, [state?.running, state?.error, onFinished, onDone, onCancelled])
// SSE: State + Progress nur nach oben (TaskList), kein UI hier
// ✅ SSE: Immer /api/stream (aber NUR für API-Tasks; lokale Tasks wie Cleanup subscriben NICHT)
useEffect(() => {
if (!sseUrl) return
// Lokaler Task (onTrigger) -> kein SSE
if (onTrigger) return
// Ohne startUrl ist es kein API-Task -> kein SSE
if (!startUrl) return
const unsub = subscribeSSE<TaskState>(sseUrl, 'state', (st) => {
const unsub = subscribeSSE<TaskState>(STREAM_URL, TASK_STATE_EVENT, (st) => {
setState(st)
if (st?.running) {
@ -200,7 +206,7 @@ export default function Task({
})
return () => unsub()
}, [sseUrl])
}, [startUrl, onTrigger])
async function start() {
if (busy) return
@ -273,9 +279,7 @@ export default function Task({
<div className="mt-0.5 text-xs text-gray-600 dark:text-gray-300">{description}</div>
{startError ? (
<div className="mt-2 text-xs text-red-700 dark:text-red-200">
{startError}
</div>
<div className="mt-2 text-xs text-red-700 dark:text-red-200">{startError}</div>
) : null}
</div>

View File

@ -1,191 +0,0 @@
// frontend/src/lib/chaturbateOnlinePoller.ts

/** One room entry as returned by /api/chaturbate/online. */
export type ChaturbateOnlineRoom = {
  username?: string
  current_show?: string
  chat_room_url?: string
  image_url?: string
}

/** Response envelope of /api/chaturbate/online. */
export type ChaturbateOnlineResponse = {
  enabled: boolean
  rooms: ChaturbateOnlineRoom[]
  total?: number
}

type OnlineState = ChaturbateOnlineResponse

/** Split `arr` into consecutive groups of at most `size` elements. */
function chunk<T>(arr: T[], size: number): T[][] {
  const groups: T[][] = []
  let start = 0
  while (start < arr.length) {
    groups.push(arr.slice(start, start + size))
    start += size
  }
  return groups
}

/**
 * Keep only the first room per username (trimmed, case-insensitive);
 * rooms without a username are dropped. Input order is preserved.
 */
function dedupeRooms(rooms: ChaturbateOnlineRoom[]): ChaturbateOnlineRoom[] {
  // Map iteration follows insertion order, so first-wins + stable order.
  const byUser = new Map<string, ChaturbateOnlineRoom>()
  for (const room of rooms) {
    const key = String(room?.username ?? '').trim().toLowerCase()
    if (!key || byUser.has(key)) continue
    byUser.set(key, room)
  }
  return [...byUser.values()]
}
/**
 * Starts adaptive polling of /api/chaturbate/online for the models supplied
 * by `getModels()` and pushes each merged snapshot to `onData`.
 *
 * Behavior visible in this implementation:
 * - Requests are chunked (CHUNK_SIZE keys per POST) and merged into one result.
 * - "ALL mode": with no models and `fetchAllWhenNoModels` set, a single
 *   request with `q: []` is sent instead of clearing the room list.
 * - Overlapping ticks are deduped via a sorted request key and an
 *   AbortController for the in-flight request.
 * - Hidden tabs back off to at least 15s between ticks.
 *
 * @returns a stop function that cancels the timer and the in-flight request.
 */
export function startChaturbateOnlinePolling(opts: {
  getModels: () => string[]
  getShow: () => string[]
  onData: (data: OnlineState) => void
  intervalMs?: number
  // NEW: when getModels() is empty, still make one call (for "ALL online")
  fetchAllWhenNoModels?: boolean
  /** Optional: called on errors (for debugging) */
  onError?: (err: unknown) => void
}) {
  const baseIntervalMs = opts.intervalMs ?? 5000
  let timer: number | null = null
  let inFlight: AbortController | null = null
  let lastKey = ''
  let lastResult: OnlineState | null = null
  let stopped = false

  // Cancel the pending setTimeout, if any.
  const clearTimer = () => {
    if (timer != null) {
      window.clearTimeout(timer)
      timer = null
    }
  }

  // Abort the request currently on the wire, if any.
  const closeInFlight = () => {
    if (inFlight) {
      try {
        inFlight.abort()
      } catch {}
      inFlight = null
    }
  }

  // Arm the next tick (no-op once stopped); always clears the old timer first,
  // so calling schedule twice in one tick is harmless.
  const schedule = (ms: number) => {
    if (stopped) return
    clearTimer()
    timer = window.setTimeout(() => void tick(), ms)
  }

  const tick = async () => {
    if (stopped) return
    try {
      const models = (opts.getModels?.() ?? [])
        .map((x) => String(x || '').trim())
        .filter(Boolean)
      const showRaw = (opts.getShow?.() ?? [])
        .map((x) => String(x || '').trim())
        .filter(Boolean)
      // stabilize: sorted copies give a deterministic dedupe key
      const show = showRaw.slice().sort()
      const modelsSorted = models.slice().sort()
      // ALL mode when there are no models and the option is enabled
      const isAllMode = modelsSorted.length === 0 && Boolean(opts.fetchAllWhenNoModels)
      // no models -> normally clear the rooms (keep the last known `enabled`,
      // don't invent a new value)
      if (modelsSorted.length === 0 && !isAllMode) {
        closeInFlight()
        const empty: OnlineState = { enabled: lastResult?.enabled ?? false, rooms: [] }
        lastResult = empty
        opts.onData(empty)
        // NOTE(review): this path schedules here AND again in `finally`;
        // harmless because schedule() clears the timer first — confirm intent.
        const nextMs = document.hidden ? Math.max(15000, baseIntervalMs) : baseIntervalMs
        schedule(nextMs)
        return
      }
      // In ALL mode we send q: [] (exactly 1 request). Otherwise the normal list.
      const modelsForRequest = isAllMode ? [] : modelsSorted
      const key = `${show.join(',')}|${isAllMode ? '__ALL__' : modelsForRequest.join(',')}`
      const requestKey = key
      lastKey = key
      // dedupe / cancel the previous request
      closeInFlight()
      const controller = new AbortController()
      inFlight = controller
      const CHUNK_SIZE = 350 // with extremely many keys, 200-300 may be better
      // ALL mode: send exactly one part with []
      const parts = isAllMode ? [[]] : chunk(modelsForRequest, CHUNK_SIZE)
      let mergedRooms: ChaturbateOnlineRoom[] = []
      let mergedEnabled = false
      let mergedTotal = 0
      let hadAnyOk = false
      for (const part of parts) {
        // bail out if aborted, superseded by a newer tick, or stopped
        if (controller.signal.aborted) return
        if (requestKey !== lastKey) return
        if (stopped) return
        const res = await fetch('/api/chaturbate/online', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({ q: part, show, refresh: false }),
          signal: controller.signal,
          cache: 'no-store',
        })
        if (!res.ok) continue
        hadAnyOk = true
        const data = (await res.json()) as OnlineState
        mergedEnabled = mergedEnabled || Boolean(data?.enabled)
        mergedRooms.push(...(Array.isArray(data?.rooms) ? data.rooms : []))
        // NEW: merge `total` (backend reports the overall count); keep the max
        const t = Number((data as any)?.total ?? 0)
        if (Number.isFinite(t) && t > mergedTotal) mergedTotal = t
      }
      if (!hadAnyOk) {
        // every chunk failed -> keep the previous result, just reschedule
        const nextMs = document.hidden ? Math.max(15000, baseIntervalMs) : baseIntervalMs
        schedule(nextMs)
        return
      }
      const merged: OnlineState = { enabled: mergedEnabled, rooms: dedupeRooms(mergedRooms), total: mergedTotal }
      if (controller.signal.aborted) return
      if (requestKey !== lastKey) return
      if (stopped) return
      lastResult = merged
      opts.onData(merged)
    } catch (e: any) {
      if (e?.name === 'AbortError') return
      opts.onError?.(e)
    } finally {
      // adaptive backoff: hidden tab polls much less often
      const nextMs = document.hidden ? Math.max(15000, baseIntervalMs) : baseIntervalMs
      schedule(nextMs)
    }
  }

  // fire once immediately
  void tick()
  // stop function
  return () => {
    stopped = true
    clearTimer()
    closeInFlight()
  }
}

View File

@ -1,32 +0,0 @@
'use client'
import { useEffect, useRef, useState } from 'react'
import type { RecordJob } from '../types'
import { subscribeSSE } from './sseSingleton'

/**
 * React hook: subscribes to the "jobs" SSE event on /api/record/stream and
 * returns the latest job list, seeded with `initialJobs` until the first
 * server push arrives. Unsubscribes on unmount.
 *
 * NOTE(review): `initialJobs` is only read on mount (useState initializer,
 * empty dependency array) — later changes to the prop are ignored.
 */
export function useRecordJobsSSE(initialJobs: RecordJob[]) {
  const [jobs, setJobs] = useState<RecordJob[]>(initialJobs)
  // Very cheap dedupe aid: remembers the last snapshot length in case the
  // server pushes identical snapshots back-to-back.
  const lastLenRef = useRef<number>(initialJobs.length)
  useEffect(() => {
    const unsub = subscribeSSE<RecordJob[]>(
      '/api/record/stream',
      'jobs',
      (data) => {
        if (!Array.isArray(data)) return
        // Length-only heuristic against "same snapshot" pushes; the state is
        // set regardless — a stricter dedupe would compare contents.
        if (data.length === lastLenRef.current) {
          // intentionally empty: setting state again is acceptable here
        }
        lastLenRef.current = data.length
        setJobs(data)
      }
    )
    return () => unsub()
  }, [])
  return jobs
}