diff --git a/backend/analyze.go b/backend/analyze.go new file mode 100644 index 0000000..7325a85 --- /dev/null +++ b/backend/analyze.go @@ -0,0 +1,819 @@ +// backend\analyze.go + +package main + +import ( + "bytes" + "context" + "encoding/base64" + "encoding/json" + "fmt" + "image" + "image/draw" + "image/jpeg" + "math" + "net/http" + "os" + "os/exec" + "path/filepath" + "sort" + "strings" + "time" + + "golang.org/x/image/webp" +) + +type analyzeVideoReq struct { + JobID string `json:"jobId"` + Output string `json:"output"` + Mode string `json:"mode"` // "sprite" | "video" + Goal string `json:"goal"` // "highlights" | "nsfw" +} + +type analyzeHit struct { + Time float64 `json:"time"` + Label string `json:"label"` + Score float64 `json:"score,omitempty"` + Start float64 `json:"start,omitempty"` + End float64 `json:"end,omitempty"` +} + +type analyzeVideoResp struct { + OK bool `json:"ok"` + Mode string `json:"mode,omitempty"` + Goal string `json:"goal,omitempty"` + Hits []analyzeHit `json:"hits"` + Segments []aiSegmentMeta `json:"segments,omitempty"` + Error string `json:"error,omitempty"` +} + +type spriteFrameCandidate struct { + Index int + Time float64 +} + +const ( + nsfwThresholdModerate = 0.35 + nsfwThresholdStrong = 0.60 +) + +var autoSelectedAILabels = map[string]struct{}{ + "anus_exposed": {}, + "female_genitalia_exposed": {}, + "male_genitalia_exposed": {}, + "female_breast_exposed": {}, + "buttocks_exposed": {}, +} + +var nsfwIgnoredLabels = map[string]struct{}{ + "face_female": {}, + "face_male": {}, + "belly_covered": {}, + "armpits_covered": {}, + "anus_covered": {}, +} + +func shouldAutoSelectAnalyzeHit(label string) bool { + label = strings.ToLower(strings.TrimSpace(label)) + _, ok := autoSelectedAILabels[label] + return ok +} + +func isIgnoredNSFWLabel(label string) bool { + label = strings.ToLower(strings.TrimSpace(label)) + _, ok := nsfwIgnoredLabels[label] + return ok +} + +func extractSpriteFrames(spritePath string, ps 
previewSpriteMetaFileInfo) ([]image.Image, error) { + f, err := os.Open(spritePath) + if err != nil { + return nil, err + } + defer f.Close() + + img, err := webp.Decode(f) + if err != nil { + return nil, err + } + + b := img.Bounds() + if ps.Cols <= 0 || ps.Rows <= 0 { + return nil, fmt.Errorf("sprite cols/rows fehlen") + } + + cellW := b.Dx() / ps.Cols + cellH := b.Dy() / ps.Rows + if cellW <= 0 || cellH <= 0 { + return nil, fmt.Errorf("ungültige sprite cell size") + } + + count := ps.Count + if count <= 0 { + count = ps.Cols * ps.Rows + } + + out := make([]image.Image, 0, count) + + for i := 0; i < count; i++ { + col := i % ps.Cols + row := i / ps.Cols + if row >= ps.Rows { + break + } + + srcRect := image.Rect( + b.Min.X+col*cellW, + b.Min.Y+row*cellH, + b.Min.X+(col+1)*cellW, + b.Min.Y+(row+1)*cellH, + ) + + dst := image.NewRGBA(image.Rect(0, 0, cellW, cellH)) + draw.Draw(dst, dst.Bounds(), img, srcRect.Min, draw.Src) + out = append(out, dst) + } + + return out, nil +} + +func encodeImageJPEGBase64(img image.Image) (string, error) { + var buf bytes.Buffer + if err := jpeg.Encode(&buf, img, &jpeg.Options{Quality: 85}); err != nil { + return "", err + } + return base64.StdEncoding.EncodeToString(buf.Bytes()), nil +} + +func classifyFrameNSFW(ctx context.Context, img image.Image) (*NsfwImageResponse, error) { + _ = ctx + + b64, err := encodeImageJPEGBase64(img) + if err != nil { + return nil, err + } + + results, err := detectNSFWFromBase64(b64) + if err != nil { + return nil, err + } + + return &NsfwImageResponse{ + Ok: true, + Results: results, + }, nil +} + +func nsfwLabelPriority(label string) int { + label = strings.ToLower(strings.TrimSpace(label)) + + switch label { + case + "anus_exposed", + "female_genitalia_exposed", + "male_genitalia_exposed", + "female_breast_exposed", + "buttocks_exposed": + return 300 + + case + "female_genitalia_covered", + "male_genitalia_covered", + "female_breast_covered", + "buttocks_covered", + "male_breast_exposed", + 
"male_breast_covered": + return 200 + + case + "belly_exposed", + "armpits_exposed", + "feet_exposed", + "feet_covered": + return 100 + + case + "face_female", + "face_male", + "belly_covered", + "armpits_covered", + "anus_covered": + return 10 + + default: + return 0 + } +} + +func pickBestNSFWResult(results []NsfwFrameResult) (string, float64) { + bestLabel := "" + bestScore := 0.0 + bestPriority := -1 + + for _, r := range results { + label := strings.ToLower(strings.TrimSpace(r.Label)) + if label == "" { + continue + } + if isIgnoredNSFWLabel(label) { + continue + } + + score := r.Score + priority := nsfwLabelPriority(label) + + if priority > bestPriority { + bestLabel = label + bestScore = score + bestPriority = priority + continue + } + + if priority == bestPriority && score > bestScore { + bestLabel = label + bestScore = score + bestPriority = priority + } + } + + return bestLabel, bestScore +} + +func extractVideoFrameAt(ctx context.Context, outPath string, atSec float64) (image.Image, error) { + tmp, err := os.CreateTemp("", "nsfw-frame-*.jpg") + if err != nil { + return nil, err + } + tmpPath := tmp.Name() + _ = tmp.Close() + defer os.Remove(tmpPath) + + ffmpegPath := strings.TrimSpace(getSettings().FFmpegPath) + if ffmpegPath == "" { + ffmpegPath = "ffmpeg" + } + + cmd := exec.CommandContext( + ctx, + ffmpegPath, + "-ss", fmt.Sprintf("%.3f", atSec), + "-i", outPath, + "-frames:v", "1", + "-q:v", "2", + "-y", + tmpPath, + ) + + if out, err := cmd.CombinedOutput(); err != nil { + return nil, fmt.Errorf("ffmpeg fehlgeschlagen: %v: %s", err, strings.TrimSpace(string(out))) + } + + f, err := os.Open(tmpPath) + if err != nil { + return nil, err + } + defer f.Close() + + img, _, err := image.Decode(f) + if err != nil { + return nil, err + } + + return img, nil +} + +func recordAnalyzeVideo(w http.ResponseWriter, r *http.Request) { + if !mustMethod(w, r, http.MethodPost) { + return + } + + var req analyzeVideoReq + if err := json.NewDecoder(r.Body).Decode(&req); 
err != nil { + http.Error(w, "ungültiger body: "+err.Error(), http.StatusBadRequest) + return + } + + req.Mode = strings.ToLower(strings.TrimSpace(req.Mode)) + req.Goal = strings.ToLower(strings.TrimSpace(req.Goal)) + + if req.Mode == "" { + req.Mode = "sprite" + } + if req.Goal == "" { + req.Goal = "highlights" + } + + switch req.Mode { + case "sprite", "video": + default: + http.Error(w, "mode muss 'sprite' oder 'video' sein", http.StatusBadRequest) + return + } + + switch req.Goal { + case "highlights", "nsfw": + default: + http.Error(w, "goal muss 'highlights' oder 'nsfw' sein", http.StatusBadRequest) + return + } + + outPath := strings.TrimSpace(req.Output) + if outPath == "" { + http.Error(w, "output fehlt", http.StatusBadRequest) + return + } + + fi, err := os.Stat(outPath) + if err != nil || fi == nil || fi.IsDir() || fi.Size() <= 0 { + http.Error(w, "output datei nicht gefunden", http.StatusNotFound) + return + } + + ctx, cancel := context.WithTimeout(r.Context(), 45*time.Second) + defer cancel() + + var hits []analyzeHit + + switch req.Mode { + case "sprite": + hits, err = analyzeVideoFromSprite(ctx, outPath, req.Goal) + case "video": + hits, err = analyzeVideoFromFrames(ctx, outPath, req.Goal) + } + + if err != nil { + respondJSON(w, analyzeVideoResp{ + OK: false, + Mode: req.Mode, + Goal: req.Goal, + Hits: []analyzeHit{}, + Error: err.Error(), + }) + return + } + + durationSec, _ := durationSecondsForAnalyze(ctx, outPath) + segments := buildSegmentsFromAnalyzeHits(hits, durationSec) + + ai := &aiAnalysisMeta{ + Goal: req.Goal, + Mode: req.Mode, + Hits: hits, + Segments: segments, + AnalyzedAtUnix: time.Now().Unix(), + } + + if err := writeVideoAIForFile(ctx, outPath, "", ai); err != nil { + fmt.Println("⚠️ writeVideoAIForFile:", err) + } + + respondJSON(w, analyzeVideoResp{ + OK: true, + Mode: req.Mode, + Goal: req.Goal, + Hits: hits, + Segments: segments, + }) +} + +func analyzeVideoFromSprite(ctx context.Context, outPath, goal string) ([]analyzeHit, 
error) { + id := strings.TrimSpace(videoIDFromOutputPath(outPath)) + if id == "" { + return nil, fmt.Errorf("konnte keine video-id aus output ableiten") + } + + metaPath, err := generatedMetaFile(id) + if err != nil || strings.TrimSpace(metaPath) == "" { + return nil, fmt.Errorf("meta.json nicht gefunden") + } + + ps, ok := readPreviewSpriteMetaFromMetaFile(metaPath) + if !ok { + return nil, fmt.Errorf("previewSprite meta fehlt") + } + if ps.Count <= 0 { + return nil, fmt.Errorf("previewSprite count fehlt") + } + + spritePath := filepath.Join(filepath.Dir(metaPath), "preview-sprite.webp") + if fi, err := os.Stat(spritePath); err != nil || fi == nil || fi.IsDir() || fi.Size() <= 0 { + return nil, fmt.Errorf("preview-sprite.webp nicht gefunden") + } + + durationSec, _ := durationSecondsForAnalyze(ctx, outPath) + + candidates := buildSpriteFrameCandidates(ps.Count, ps.StepSeconds, durationSec) + if len(candidates) == 0 { + return nil, fmt.Errorf("keine sprite-kandidaten vorhanden") + } + + // ---------------------------------------------------------------- + // HIER ist der Hook für echte AI/Vision-Analyse. 
+ // + // Aktuell: + // - erzeugen wir brauchbare Zeitpunkte aus den Preview-Frames + // - gruppieren sie zu Treffern + // + // Später kannst du hier: + // - spritePath + frame indices an ein Vision-Modell geben + // - pro Frame Labels / Scores zurückbekommen + // - daraus Trefferbereiche bilden + // ---------------------------------------------------------------- + frameHits, err := analyzeSpriteCandidatesWithAI(ctx, spritePath, ps, candidates, goal) + if err != nil { + return nil, err + } + + return mergeAnalyzeHits(frameHits), nil +} + +func nsfwThresholdForLabel(label string) float64 { + label = strings.ToLower(strings.TrimSpace(label)) + + switch label { + case + "anus_exposed", + "female_genitalia_exposed", + "male_genitalia_exposed", + "female_breast_exposed", + "buttocks_exposed": + return nsfwThresholdStrong + + case + "female_breast_covered", + "male_breast_exposed", + "male_breast_covered", + "buttocks_covered", + "female_genitalia_covered", + "male_genitalia_covered", + "belly_exposed", + "armpits_exposed", + "feet_exposed", + "feet_covered": + return nsfwThresholdModerate + + default: + return 0.50 + } +} + +func analyzeVideoFromFrames(ctx context.Context, outPath, goal string) ([]analyzeHit, error) { + if goal != "nsfw" { + return []analyzeHit{}, nil + } + + durationSec, _ := durationSecondsForAnalyze(ctx, outPath) + if durationSec <= 0 { + return nil, fmt.Errorf("videolänge konnte nicht bestimmt werden") + } + + sampleTimes := buildVideoSampleTimes(durationSec, 24) + if len(sampleTimes) == 0 { + return nil, fmt.Errorf("keine frame-samples berechnet") + } + + hits := make([]analyzeHit, 0, len(sampleTimes)) + + for _, t := range sampleTimes { + img, err := extractVideoFrameAt(ctx, outPath, t) + if err != nil { + return nil, fmt.Errorf("frame extraktion bei %.3fs fehlgeschlagen: %w", t, err) + } + + res, err := classifyFrameNSFW(ctx, img) + if err != nil { + continue + } + + bestLabel, bestScore := pickBestNSFWResult(res.Results) + if bestLabel == "" { 
+ continue + } + + threshold := nsfwThresholdForLabel(bestLabel) + + if bestScore < threshold { + continue + } + + hits = append(hits, analyzeHit{ + Time: t, + Label: bestLabel, + Score: bestScore, + Start: math.Max(0, t-4), + End: t + 4, + }) + } + + return mergeAnalyzeHits(hits), nil +} + +func analyzeSpriteCandidatesWithAI( + ctx context.Context, + spritePath string, + ps previewSpriteMetaFileInfo, + candidates []spriteFrameCandidate, + goal string, +) ([]analyzeHit, error) { + if goal != "nsfw" { + return []analyzeHit{}, nil + } + + frames, err := extractSpriteFrames(spritePath, ps) + if err != nil { + return nil, fmt.Errorf("sprite frames extrahieren fehlgeschlagen: %w", err) + } + + hits := make([]analyzeHit, 0, len(candidates)) + + for _, c := range candidates { + if c.Index < 0 || c.Index >= len(frames) { + continue + } + + res, err := classifyFrameNSFW(ctx, frames[c.Index]) + if err != nil { + continue + } + + bestLabel, bestScore := pickBestNSFWResult(res.Results) + if bestLabel == "" { + continue + } + + threshold := nsfwThresholdForLabel(bestLabel) + + if bestScore < threshold { + continue + } + + span := inferredSpanSeconds(ps.StepSeconds, 8) + start := math.Max(0, c.Time-(span/2)) + end := c.Time + (span / 2) + + hits = append(hits, analyzeHit{ + Time: c.Time, + Label: bestLabel, + Score: bestScore, + Start: start, + End: end, + }) + } + + return hits, nil +} + +func mergeAnalyzeHits(in []analyzeHit) []analyzeHit { + if len(in) == 0 { + return []analyzeHit{} + } + + cp := make([]analyzeHit, 0, len(in)) + for _, h := range in { + label := strings.ToLower(strings.TrimSpace(h.Label)) + if label == "" { + continue + } + if isIgnoredNSFWLabel(label) { + continue + } + + start := h.Start + end := h.End + + if start <= 0 && end <= 0 { + start = h.Time + end = h.Time + } else { + if start <= 0 { + start = h.Time + } + if end <= 0 { + end = h.Time + } + } + + h.Label = label + h.Start = start + h.End = end + cp = append(cp, h) + } + + if len(cp) == 0 { + 
return []analyzeHit{} + } + + sort.Slice(cp, func(i, j int) bool { + if cp[i].Start != cp[j].Start { + return cp[i].Start < cp[j].Start + } + if cp[i].End != cp[j].End { + return cp[i].End < cp[j].End + } + return cp[i].Label < cp[j].Label + }) + + out := make([]analyzeHit, 0, len(cp)) + cur := cp[0] + + for i := 1; i < len(cp); i++ { + n := cp[i] + + // Nur direkt aufeinanderfolgende Treffer mit gleichem Label zusammenfassen + const mergeGapSeconds = 1.0 + + sameLabel := strings.EqualFold(cur.Label, n.Label) + touchesOrNear := n.Start <= cur.End+mergeGapSeconds + + if sameLabel && touchesOrNear { + if n.Start < cur.Start { + cur.Start = n.Start + } + if n.End > cur.End { + cur.End = n.End + } + if n.Score > cur.Score { + cur.Score = n.Score + } + cur.Time = (cur.Start + cur.End) / 2 + continue + } + + out = append(out, cur) + cur = n + } + + out = append(out, cur) + return out +} + +func buildSegmentsFromAnalyzeHits(hits []analyzeHit, duration float64) []aiSegmentMeta { + if len(hits) == 0 || duration <= 0 { + return []aiSegmentMeta{} + } + + out := make([]aiSegmentMeta, 0, len(hits)) + + for _, hit := range hits { + if !shouldAutoSelectAnalyzeHit(hit.Label) { + continue + } + + start := hit.Start + end := hit.End + + if start <= 0 && end <= 0 { + start = hit.Time + end = hit.Time + } else { + if start <= 0 { + start = hit.Time + } + if end <= 0 { + end = hit.Time + } + } + + if start > end { + start, end = end, start + } + + start = math.Max(0, math.Min(start, duration)) + end = math.Max(0, math.Min(end, duration)) + + if end <= start { + continue + } + + out = append(out, aiSegmentMeta{ + Label: strings.ToLower(strings.TrimSpace(hit.Label)), + StartSeconds: start, + EndSeconds: end, + DurationSeconds: end - start, + Score: hit.Score, + AutoSelected: true, + }) + } + + if len(out) == 0 { + return []aiSegmentMeta{} + } + + sort.Slice(out, func(i, j int) bool { + if out[i].StartSeconds != out[j].StartSeconds { + return out[i].StartSeconds < out[j].StartSeconds + } 
+ if out[i].EndSeconds != out[j].EndSeconds { + return out[i].EndSeconds < out[j].EndSeconds + } + return out[i].Label < out[j].Label + }) + + merged := make([]aiSegmentMeta, 0, len(out)) + cur := out[0] + + for i := 1; i < len(out); i++ { + n := out[i] + + const mergeGapSeconds = 15.0 + + sameLabel := strings.EqualFold(cur.Label, n.Label) + nearEnough := n.StartSeconds <= cur.EndSeconds+mergeGapSeconds + + if sameLabel && nearEnough { + if n.StartSeconds < cur.StartSeconds { + cur.StartSeconds = n.StartSeconds + } + if n.EndSeconds > cur.EndSeconds { + cur.EndSeconds = n.EndSeconds + } + cur.DurationSeconds = cur.EndSeconds - cur.StartSeconds + if n.Score > cur.Score { + cur.Score = n.Score + } + cur.AutoSelected = cur.AutoSelected || n.AutoSelected + continue + } + + merged = append(merged, cur) + cur = n + } + + merged = append(merged, cur) + return merged +} + +func buildSpriteFrameCandidates(count int, stepSeconds, durationSec float64) []spriteFrameCandidate { + if count <= 0 { + return nil + } + + out := make([]spriteFrameCandidate, 0, count) + + stepLooksUsable := false + if stepSeconds > 0 && durationSec > 0 { + coverage := stepSeconds * math.Max(1, float64(count-1)) + stepLooksUsable = coverage >= durationSec*0.7 && coverage <= durationSec*1.3 + } + + for i := 0; i < count; i++ { + var t float64 + + if stepLooksUsable { + t = float64(i) * stepSeconds + } else if durationSec > 0 && count > 1 { + t = (float64(i) / float64(count-1)) * durationSec + } else if stepSeconds > 0 { + t = float64(i) * stepSeconds + } else { + t = float64(i) + } + + out = append(out, spriteFrameCandidate{ + Index: i, + Time: t, + }) + } + + return out +} + +func buildVideoSampleTimes(durationSec float64, sampleCount int) []float64 { + if durationSec <= 0 || sampleCount <= 0 { + return nil + } + if sampleCount == 1 { + return []float64{0} + } + + out := make([]float64, 0, sampleCount) + for i := 0; i < sampleCount; i++ { + ratio := float64(i) / float64(sampleCount-1) + t := ratio * 
durationSec + out = append(out, t) + } + return out +} + +func inferredSpanSeconds(stepSeconds float64, fallback float64) float64 { + if stepSeconds > 0 { + return math.Max(2, stepSeconds*1.5) + } + return fallback +} + +func durationSecondsForAnalyze(ctx context.Context, outPath string) (float64, error) { + ctx2, cancel := context.WithTimeout(ctx, 8*time.Second) + defer cancel() + return durationSecondsCached(ctx2, outPath) +} + +func videoIDFromOutputPath(outPath string) string { + base := filepath.Base(strings.TrimSpace(outPath)) + if base == "" { + return "" + } + stem := strings.TrimSuffix(base, filepath.Ext(base)) + stem = stripHotPrefix(stem) + return strings.TrimSpace(stem) +} diff --git a/backend/assets/nsfw/320n.onnx b/backend/assets/nsfw/320n.onnx new file mode 100644 index 0000000..68eedc3 Binary files /dev/null and b/backend/assets/nsfw/320n.onnx differ diff --git a/backend/assets/nsfw/onnxruntime.dll b/backend/assets/nsfw/onnxruntime.dll new file mode 100644 index 0000000..786b68d Binary files /dev/null and b/backend/assets/nsfw/onnxruntime.dll differ diff --git a/backend/assets_generate.go b/backend/assets_generate.go index 6f8b556..e7dd04d 100644 --- a/backend/assets_generate.go +++ b/backend/assets_generate.go @@ -1,8 +1,9 @@ -// backend/generate.go +// backend\assets_generate.go package main import ( "context" + "encoding/json" "errors" "fmt" "math" @@ -267,8 +268,18 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU meta, _ := ensureVideoMeta(ctx, videoPath, metaPath, sourceURL, fi) out.MetaOK = meta.ok - // Wenn alles da ist: skipped - if thumbBefore && previewBefore && spriteBefore && meta.ok { + // Wenn alles da ist: als skipped markieren, + // aber NICHT sofort returnen, damit meta.json + // (previewClips / previewSprite) trotzdem sauber geschrieben wird. 
+ metaHasSprite := false + if oldMeta, ok := readVideoMetaIfValid(metaPath, fi); ok && oldMeta != nil && oldMeta.PreviewSprite != nil { + metaHasSprite = true + } + + metaHasAI := hasAIResultsForOutput(videoPath) + + // Nur dann wirklich komplett "fertig", wenn auch AI vorhanden ist + if thumbBefore && previewBefore && spriteBefore && meta.ok && metaHasSprite && metaHasAI { out.Skipped = true progress(1) return out, nil @@ -412,38 +423,31 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU } // ---------------- - // Preview Sprite (stashapp-like scrubber) + // Preview Sprite (festes Layout) // ---------------- var spriteMeta *previewSpriteMeta - if spriteBefore { - // Meta trotzdem vorbereiten (für JSON) - if meta.durSec > 0 { - stepSec := 5.0 - count := int(math.Floor(meta.durSec/stepSec)) + 1 - if count < 1 { - count = 1 - } - if count > 200 { - count = 200 // Schutz - } - cols, rows := chooseSpriteGrid(count) + if meta.durSec > 0 { + cols, rows, count, cellW, cellH := fixedPreviewSpriteLayout() + stepSec := previewSpriteStepSeconds(meta.durSec) - spriteMeta = &previewSpriteMeta{ - Path: fmt.Sprintf("/api/preview-sprite/%s", id), - Count: count, - Cols: cols, - Rows: rows, - StepSeconds: stepSec, - } + spriteMeta = &previewSpriteMeta{ + Path: fmt.Sprintf("/api/preview-sprite/%s", id), + Count: count, + Cols: cols, + Rows: rows, + StepSeconds: stepSec, + CellWidth: cellW, + CellHeight: cellH, } - } else { + } + + if !spriteBefore { func() { if sourceInputInvalid { return } - // nur sinnvoll wenn wir Dauer kennen if !(meta.durSec > 0) { return } @@ -456,37 +460,24 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU } defer genSem.Release() - stepSec := 5.0 - count := int(math.Floor(meta.durSec/stepSec)) + 1 - if count < 1 { - count = 1 - } - if count > 200 { - count = 200 // Schutz gegen riesige Sprites - } + cols, rows, _, cellW, cellH := fixedPreviewSpriteLayout() + stepSec := 
previewSpriteStepSeconds(meta.durSec) - cols, rows := chooseSpriteGrid(count) - - // Zellgröße (16:9) für Gallery-Thumbs - cellW := 160 - cellH := 90 - - if err := generatePreviewSpriteWebP(genCtx, videoPath, spritePath, cols, rows, stepSec, cellW, cellH); err != nil { + if err := generatePreviewSpriteWebP( + genCtx, + videoPath, + spritePath, + cols, + rows, + stepSec, + cellW, + cellH, + ); err != nil { fmt.Println("⚠️ preview sprite:", err) return } out.SpriteGenerated = true - - spriteMeta = &previewSpriteMeta{ - Path: fmt.Sprintf("/api/preview-sprite/%s", id), - Count: count, - Cols: cols, - Rows: rows, - StepSeconds: stepSec, - CellWidth: cellW, - CellHeight: cellH, - } }() } @@ -524,3 +515,120 @@ func ensureAssetsForVideoDetailed(ctx context.Context, videoPath string, sourceU progress(1) return out, nil } + +func hasAIResultsForOutput(outPath string) bool { + outPath = strings.TrimSpace(outPath) + if outPath == "" { + return false + } + + id := assetIDFromVideoPath(outPath) + if id == "" { + return false + } + + metaPath, err := generatedMetaFile(id) + if err != nil || strings.TrimSpace(metaPath) == "" { + return false + } + + b, err := os.ReadFile(metaPath) + if err != nil || len(b) == 0 { + return false + } + + var m map[string]any + dec := json.NewDecoder(strings.NewReader(string(b))) + dec.UseNumber() + if err := dec.Decode(&m); err != nil { + return false + } + + aiMap, ok := m["ai"].(map[string]any) + if !ok || aiMap == nil { + return false + } + + rawHits, hasHits := aiMap["hits"].([]any) + rawSegs, hasSegs := aiMap["segments"].([]any) + + return (hasHits && len(rawHits) > 0) || (hasSegs && len(rawSegs) > 0) +} + +type PrepareSplitResult struct { + AssetsReady bool + AnalyzeReady bool + SpriteReady bool + MetaOK bool +} + +func prepareVideoForSplit(ctx context.Context, videoPath, sourceURL, goal string) (PrepareSplitResult, error) { + var out PrepareSplitResult + + videoPath = strings.TrimSpace(videoPath) + if videoPath == "" { + return out, 
fmt.Errorf("empty videoPath") + } + + fi, err := os.Stat(videoPath) + if err != nil || fi == nil || fi.IsDir() || fi.Size() <= 0 { + return out, fmt.Errorf("video datei nicht gefunden") + } + + // 1) Assets sicherstellen (preview.webp / preview.mp4 / preview-sprite.webp / meta.json) + assetsRes, err := ensureAssetsForVideoDetailed(ctx, videoPath, sourceURL, nil) + if err != nil { + return out, err + } + + _ = assetsRes + + id := assetIDFromVideoPath(videoPath) + if id == "" { + return out, fmt.Errorf("konnte asset id nicht ableiten") + } + + ps := previewSpriteTruthForID(id) + out.SpriteReady = ps.Exists + out.AssetsReady = ps.Exists + out.MetaOK = true + + // 2) AI-Segmente prüfen + if hasAIResultsForOutput(videoPath) { + out.AnalyzeReady = true + return out, nil + } + + goal = strings.ToLower(strings.TrimSpace(goal)) + if goal == "" { + goal = "nsfw" + } + + // 3) AI nur ausführen, wenn Sprite vorhanden ist + if !ps.Exists { + return out, nil + } + + durationSec, _ := durationSecondsForAnalyze(ctx, videoPath) + hits, aerr := analyzeVideoFromSprite(ctx, videoPath, goal) + if aerr != nil { + return out, nil + } + + segments := buildSegmentsFromAnalyzeHits(hits, durationSec) + + ai := &aiAnalysisMeta{ + Goal: goal, + Mode: "sprite", + Hits: hits, + Segments: segments, + AnalyzedAtUnix: time.Now().Unix(), + } + + if werr := writeVideoAIForFile(ctx, videoPath, sourceURL, ai); werr != nil { + return out, nil + } + + out.AnalyzeReady = len(segments) > 0 + return out, nil +} diff --git a/backend/assets_sprite.go b/backend/assets_sprite.go index f065bf8..f4d0b6b 100644 --- a/backend/assets_sprite.go +++ b/backend/assets_sprite.go @@ -5,53 +5,35 @@ package main import ( "context" "fmt" - "math" "os" "os/exec" "path/filepath" "strings" ) -// chooseSpriteGrid wählt ein sinnvolles cols/rows-Grid für count Frames. -// Ziel: wenig leere Zellen + eher horizontales Layout (passt gut zu 16:9 Cells). 
// Fixed sprite-sheet layout: a 10x8 grid of 160x90 cells (80 frames total).
const (
	previewSpriteCols       = 10
	previewSpriteRows       = 8
	previewSpriteFrameCount = previewSpriteCols * previewSpriteRows
	previewSpriteCellW      = 160
	previewSpriteCellH      = 90
)

// fixedPreviewSpriteLayout returns the constant sprite-sheet layout
// (cols, rows, frame count, cell width, cell height).
func fixedPreviewSpriteLayout() (cols, rows, count, cellW, cellH int) {
	return previewSpriteCols,
		previewSpriteRows,
		previewSpriteFrameCount,
		previewSpriteCellW,
		previewSpriteCellH
}

// previewSpriteStepSeconds computes the frame-sampling interval so that
// previewSpriteFrameCount frames span the whole video. The step is clamped
// to at least 0.5s, so clips shorter than frameCount/2 seconds end up with
// fewer frames than the grid has cells — presumably tolerated downstream;
// TODO(review) confirm consumers handle partially filled sheets.
// An unknown (non-positive) duration falls back to a 5s step.
func previewSpriteStepSeconds(durationSec float64) float64 {
	if durationSec <= 0 {
		return 5
	}

	step := durationSec / float64(previewSpriteFrameCount)
	if step >= 0.5 {
		return step
	}
	return 0.5
}
- // Deshalb muss .webp am Ende stehen (nicht "...webp.tmp"). ext := filepath.Ext(outPath) if ext == "" { ext = ".webp" } base := strings.TrimSuffix(outPath, ext) - tmpPath := base + ".tmp" + ext // z.B. preview-sprite.tmp.webp + tmpPath := base + ".tmp" + ext + + ffmpegPath := strings.TrimSpace(getSettings().FFmpegPath) + if ffmpegPath == "" { + ffmpegPath = "ffmpeg" + } - // fps=1/stepSec nimmt alle stepSec Sekunden einen Frame - // scale+pad erzwingt feste Zellgröße (wichtig für korrektes background-positioning im Frontend) vf := fmt.Sprintf( "fps=1/%g,scale=%d:%d:force_original_aspect_ratio=decrease:flags=lanczos,"+ "pad=%d:%d:(ow-iw)/2:(oh-ih)/2:black,tile=%dx%d:margin=0:padding=0", @@ -111,10 +92,9 @@ func generatePreviewSpriteWebP( cols, rows, ) - // Statisches WebP-Spritesheet cmd := exec.CommandContext( ctx, - "ffmpeg", + ffmpegPath, "-hide_banner", "-loglevel", "error", "-y", @@ -122,6 +102,7 @@ func generatePreviewSpriteWebP( "-an", "-sn", "-vf", vf, + "-vsync", "vfr", "-frames:v", "1", "-c:v", "libwebp", "-lossless", "0", @@ -149,7 +130,6 @@ func generatePreviewSpriteWebP( return fmt.Errorf("sprite temp file invalid/empty") } - // Windows: Ziel vorher löschen, damit Rename klappt _ = os.Remove(outPath) if err := os.Rename(tmpPath, outPath); err != nil { _ = os.Remove(tmpPath) diff --git a/backend/disk_guard.go b/backend/disk_guard.go index 301c146..5d121ef 100644 --- a/backend/disk_guard.go +++ b/backend/disk_guard.go @@ -182,39 +182,41 @@ func inFlightBytesForJob(j *RecordJob) uint64 { return sizeOfPathBestEffort(j.Output) } -func minRelevantInFlightBytes() uint64 { - s := getSettings() - - // Nur wenn Auto-Delete kleine Downloads aktiv ist und eine sinnvolle Schwelle gesetzt ist - if !s.AutoDeleteSmallDownloads { - return 0 - } - - mb := s.AutoDeleteSmallDownloadsBelowMB - if mb <= 0 { - return 0 - } - - // MB -> Bytes (MiB passend zum restlichen Code mit GiB) - return uint64(mb) * 1024 * 1024 -} - const giB = uint64(1024 * 1024 * 1024) // 
computeDiskThresholds: -// Pause = ceil( (2 * inFlightBytes) / GiB ) +// Pause = max(lowDiskPauseBelowGB, ceil(relevantInFlightBytes / GiB)) // Resume = Pause + 3 GB (Hysterese) -// Wenn inFlight==0 => Pause/Resume = 0 -func computeDiskThresholds() (pauseGB int, resumeGB int, inFlight uint64, pauseNeed uint64, resumeNeed uint64) { - inFlight = sumInFlightBytes() - if inFlight == 0 { - return 0, 0, 0, 0, 0 +// +// relevantInFlightBytes = Summe aller laufenden Downloads, +// deren aktuelle Dateigröße über AutoDeleteSmallDownloadsBelowMB liegt. +// +// Damit greift immer mindestens die konfigurierte Mindestschwelle, +// zusätzlich aber auch eine dynamische Schwelle basierend auf den +// "relevanten" Downloads, die nicht automatisch gelöscht würden. +func computeDiskThresholds() (pauseGB int, resumeGB int, relevantInFlight uint64, pauseNeed uint64, resumeNeed uint64) { + s := getSettings() + + relevantInFlight = sumInFlightBytesAboveAutoDeleteThreshold() + + configPauseGB := s.LowDiskPauseBelowGB + if configPauseGB <= 0 { + configPauseGB = 5 } - need := inFlight * 2 - pauseGB = int((need + giB - 1) / giB) // ceil + dynamicPauseGB := 0 + if relevantInFlight > 0 { + dynamicPauseGB = int((relevantInFlight + giB - 1) / giB) // ceil + } + + // größere Schwelle nehmen: + // - manuelle Mindestreserve + // - dynamische Reserve für relevante laufende Downloads + pauseGB = dynamicPauseGB + if pauseGB <= 0 { + pauseGB = configPauseGB + } - // Safety cap (nur zur Sicherheit, falls irgendwas eskaliert) if pauseGB > 10_000 { pauseGB = 10_000 } @@ -233,7 +235,6 @@ func computeDiskThresholds() (pauseGB int, resumeGB int, inFlight uint64, pauseN // Idee: Für TS->MP4 Peak brauchst du grob nochmal die Größe der aktuellen Datei als Reserve. 
func sumInFlightBytes() uint64 { var sum uint64 - minKeepBytes := minRelevantInFlightBytes() jobsMu.Lock() defer jobsMu.Unlock() @@ -246,19 +247,47 @@ func sumInFlightBytes() uint64 { continue } - b := inFlightBytesForJob(j) + sum += inFlightBytesForJob(j) + } - // ✅ Nur "relevante" Dateien berücksichtigen: - // Wenn Auto-Delete kleine Downloads aktiv ist, zählen wir nur Jobs, - // deren aktuelle Dateigröße bereits über der Schwelle liegt. - // - // Hinweis: Ein Job kann später noch über die Schwelle wachsen. - // Diese Logik ist bewusst "weniger konservativ", so wie gewünscht. - if minKeepBytes > 0 && b > 0 && b < minKeepBytes { + return sum +} + +func sumInFlightBytesAboveAutoDeleteThreshold() uint64 { + s := getSettings() + + thresholdMB := s.AutoDeleteSmallDownloadsBelowMB + if thresholdMB < 0 { + thresholdMB = 0 + } + + thresholdBytes := uint64(thresholdMB) * 1024 * 1024 + + var sum uint64 + + jobsMu.Lock() + defer jobsMu.Unlock() + + for _, j := range jobs { + if j == nil { + continue + } + if j.Status != JobRunning { continue } - sum += b + size := inFlightBytesForJob(j) + if size == 0 { + continue + } + + // Nur Downloads berücksichtigen, die über der Auto-Delete-Grenze liegen. + // Kleine Dateien würden später ohnehin automatisch entfernt. + if size <= thresholdBytes { + continue + } + + sum += size } return sum @@ -294,11 +323,11 @@ func startDiskSpaceGuard() { } free := u.Free - // ✅ Dynamische Schwellen: - // Pause = ceil((2 * inFlight) / GiB) + // ✅ Schwellen: + // Pause = max(config lowDiskPauseBelowGB, ceil((2 * inFlight) / GiB)) // Resume = Pause + 3 GB // pauseNeed/resumeNeed sind die benötigten freien Bytes - pauseGB, resumeGB, inFlight, pauseNeed, resumeNeed := computeDiskThresholds() + pauseGB, resumeGB, relevantInFlight, pauseNeed, resumeNeed := computeDiskThresholds() // ✅ diskEmergency NICHT sticky behalten. 
// Stattdessen dynamisch mit Hysterese setzen/löschen: @@ -310,20 +339,6 @@ func startDiskSpaceGuard() { wasEmergency := atomic.LoadInt32(&diskEmergency) == 1 - // Wenn aktuell nichts läuft, brauchen wir keine Reservierung. - // Dann diskEmergency freigeben (falls gesetzt), damit Autostart wieder möglich ist. - // (User-Pause bleibt davon unberührt.) - if inFlight == 0 { - if wasEmergency { - atomic.StoreInt32(&diskEmergency, 0) - broadcastAutostartPaused() - fmt.Printf("✅ [disk] Emergency cleared (no in-flight jobs). free=%s (%dB) path=%s\n", - formatBytesSI(u64ToI64(free)), free, dir, - ) - } - continue - } - isLowForPause := free < pauseNeed isHighEnoughForResume := free >= resumeNeed @@ -337,11 +352,11 @@ func startDiskSpaceGuard() { broadcastAutostartPaused() fmt.Printf( - "🛑 [disk] Low space: free=%s (%dB) (< %s, %dB, pause=%dGB resume=%dGB, inFlight=%s, %dB) -> stop jobs + block autostart via diskEmergency (path=%s)\n", + "🛑 [disk] Low space: free=%s (%dB) (< %s, %dB, pause=%dGB resume=%dGB, relevantInFlight=%s, %dB) -> stop jobs + block autostart via diskEmergency (path=%s)\n", formatBytesSI(u64ToI64(free)), free, formatBytesSI(u64ToI64(pauseNeed)), pauseNeed, pauseGB, resumeGB, - formatBytesSI(u64ToI64(inFlight)), inFlight, + formatBytesSI(u64ToI64(relevantInFlight)), relevantInFlight, dir, ) @@ -358,11 +373,11 @@ func startDiskSpaceGuard() { broadcastAutostartPaused() fmt.Printf( - "✅ [disk] Space recovered: free=%s (%dB) (>= %s, %dB, resume=%dGB, inFlight=%s, %dB) -> unblock autostart (path=%s)\n", + "✅ [disk] Space recovered: free=%s (%dB) (>= %s, %dB, resume=%dGB, relevantInFlight=%s, %dB) -> unblock autostart (path=%s)\n", formatBytesSI(u64ToI64(free)), free, formatBytesSI(u64ToI64(resumeNeed)), resumeNeed, resumeGB, - formatBytesSI(u64ToI64(inFlight)), inFlight, + formatBytesSI(u64ToI64(relevantInFlight)), relevantInFlight, dir, ) } diff --git a/backend/frontend.go b/backend/frontend.go index c98bdde..6df1a30 100644 --- a/backend/frontend.go +++ 
b/backend/frontend.go @@ -1,118 +1,38 @@ +// backend\frontend.go + package main import ( + "embed" "fmt" + "io/fs" "net/http" - "os" "path" - "path/filepath" "strings" ) -// Frontend (Vite build) als SPA ausliefern: Dateien aus dist, sonst index.html -func registerFrontend(mux *http.ServeMux) { - // Kandidaten: zuerst ENV, dann typische Ordner - candidates := []string{ - strings.TrimSpace(os.Getenv("FRONTEND_DIST")), - "web/dist", - "dist", - } - - var distAbs string - for _, c := range candidates { - if c == "" { - continue - } - abs, err := resolvePathRelativeToApp(c) - if err != nil { - continue - } - if fi, err := os.Stat(filepath.Join(abs, "index.html")); err == nil && !fi.IsDir() { - distAbs = abs - break - } - } - - if distAbs == "" { - fmt.Println("⚠️ Frontend dist nicht gefunden (tried: FRONTEND_DIST, frontend/dist, dist) – API läuft trotzdem.") - return - } - - fmt.Println("🖼️ Frontend dist:", distAbs) - - fileServer := http.FileServer(http.Dir(distAbs)) - - mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - // /api bleibt bei deinen API-Routen (längeres Pattern gewinnt), - // aber falls mal was durchrutscht: - if strings.HasPrefix(r.URL.Path, "/api/") { - http.NotFound(w, r) - return - } - - // 1) Wenn echte Datei existiert -> ausliefern - reqPath := r.URL.Path - if reqPath == "" || reqPath == "/" { - // index.html - w.Header().Set("Cache-Control", "no-store") - http.ServeFile(w, r, filepath.Join(distAbs, "index.html")) - return - } - - // URL-Pfad in Dateisystem-Pfad umwandeln (ohne Traversal) - clean := path.Clean("/" + reqPath) // path.Clean (für URL-Slashes) - rel := strings.TrimPrefix(clean, "/") - onDisk := filepath.Join(distAbs, filepath.FromSlash(rel)) - - if fi, err := os.Stat(onDisk); err == nil && !fi.IsDir() { - // Statische Assets ruhig cachen (Vite hashed assets) - ext := strings.ToLower(filepath.Ext(onDisk)) - if ext != "" && ext != ".html" { - w.Header().Set("Cache-Control", "public, max-age=31536000, immutable") - } 
else { - w.Header().Set("Cache-Control", "no-store") - } - fileServer.ServeHTTP(w, r) - return - } - - // 2) SPA-Fallback: alle "Routen" ohne Datei -> index.html - w.Header().Set("Cache-Control", "no-store") - http.ServeFile(w, r, filepath.Join(distAbs, "index.html")) - }) -} +// Vite-Build einbetten. +// Beim Go-Build muss backend/web/dist bereits existieren. +// +//go:embed web/dist web/dist/* +var embeddedFrontend embed.FS func makeFrontendHandler() (http.Handler, bool) { - // Kandidaten: zuerst ENV, dann typische Ordner - candidates := []string{ - strings.TrimSpace(os.Getenv("FRONTEND_DIST")), - "web/dist", - "dist", - } - - var distAbs string - for _, c := range candidates { - if c == "" { - continue - } - abs, err := resolvePathRelativeToApp(c) - if err != nil { - continue - } - if fi, err := os.Stat(filepath.Join(abs, "index.html")); err == nil && !fi.IsDir() { - distAbs = abs - break - } - } - - if distAbs == "" { - fmt.Println("⚠️ Frontend dist nicht gefunden (tried: FRONTEND_DIST, web/dist, dist) – API läuft trotzdem.") + distFS, err := fs.Sub(embeddedFrontend, "web/dist") + if err != nil { + fmt.Println("⚠️ Frontend dist nicht im Binary gefunden – API läuft trotzdem:", err) return nil, false } - fmt.Println("🖼️ Frontend dist:", distAbs) + // Prüfen, ob index.html vorhanden ist + if _, err := fs.Stat(distFS, "index.html"); err != nil { + fmt.Println("⚠️ Frontend index.html nicht im Binary gefunden – API läuft trotzdem:", err) + return nil, false + } - fileServer := http.FileServer(http.Dir(distAbs)) + fmt.Println("🖼️ Frontend dist: embedded web/dist") + + fileServer := http.FileServer(http.FS(distFS)) h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // /api bleibt API @@ -124,17 +44,23 @@ func makeFrontendHandler() (http.Handler, bool) { reqPath := r.URL.Path if reqPath == "" || reqPath == "/" { w.Header().Set("Cache-Control", "no-store") - http.ServeFile(w, r, filepath.Join(distAbs, "index.html")) + indexBytes, err := 
fs.ReadFile(distFS, "index.html") + if err != nil { + http.Error(w, "index.html nicht gefunden", http.StatusInternalServerError) + return + } + w.Header().Set("Content-Type", "text/html; charset=utf-8") + _, _ = w.Write(indexBytes) return } - // URL-Pfad in Dateisystem-Pfad umwandeln (ohne Traversal) + // URL-Pfad bereinigen clean := path.Clean("/" + reqPath) rel := strings.TrimPrefix(clean, "/") - onDisk := filepath.Join(distAbs, filepath.FromSlash(rel)) - if fi, err := os.Stat(onDisk); err == nil && !fi.IsDir() { - ext := strings.ToLower(filepath.Ext(onDisk)) + // Wenn echte Datei im embedded FS existiert -> ausliefern + if fi, err := fs.Stat(distFS, rel); err == nil && !fi.IsDir() { + ext := strings.ToLower(path.Ext(rel)) if ext != "" && ext != ".html" { w.Header().Set("Cache-Control", "public, max-age=31536000, immutable") } else { @@ -146,8 +72,22 @@ func makeFrontendHandler() (http.Handler, bool) { // SPA-Fallback w.Header().Set("Cache-Control", "no-store") - http.ServeFile(w, r, filepath.Join(distAbs, "index.html")) + indexBytes, err := fs.ReadFile(distFS, "index.html") + if err != nil { + http.Error(w, "index.html nicht gefunden", http.StatusInternalServerError) + return + } + w.Header().Set("Content-Type", "text/html; charset=utf-8") + _, _ = w.Write(indexBytes) }) return h, true } + +func registerFrontend(mux *http.ServeMux) { + h, ok := makeFrontendHandler() + if !ok { + return + } + mux.Handle("/", h) +} diff --git a/backend/go.mod b/backend/go.mod index 1310b23..c3eef54 100644 --- a/backend/go.mod +++ b/backend/go.mod @@ -9,6 +9,7 @@ require ( github.com/jackc/pgx/v5 v5.8.0 github.com/pquerna/otp v1.5.0 github.com/r3labs/sse/v2 v2.10.0 + github.com/yalue/onnxruntime_go v1.27.0 golang.org/x/crypto v0.47.0 ) @@ -24,8 +25,8 @@ require ( github.com/tklauser/go-sysconf v0.3.12 // indirect github.com/tklauser/numcpus v0.6.1 // indirect github.com/yusufpapurcu/wmi v1.2.4 // indirect - golang.org/x/sync v0.19.0 // indirect - golang.org/x/text v0.33.0 // 
indirect + golang.org/x/sync v0.20.0 // indirect + golang.org/x/text v0.35.0 // indirect gopkg.in/cenkalti/backoff.v1 v1.1.0 // indirect ) @@ -34,7 +35,7 @@ require ( github.com/andybalholm/cascadia v1.3.3 // indirect github.com/shirou/gopsutil/v3 v3.24.5 github.com/sqweek/dialog v0.0.0-20240226140203-065105509627 - golang.org/x/image v0.35.0 + golang.org/x/image v0.37.0 golang.org/x/net v0.48.0 // indirect golang.org/x/sys v0.40.0 // indirect ) diff --git a/backend/go.sum b/backend/go.sum index 2b594e8..ca705d9 100644 --- a/backend/go.sum +++ b/backend/go.sum @@ -53,6 +53,8 @@ github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFA github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= +github.com/yalue/onnxruntime_go v1.27.0 h1:c1YSgDNtpf0WGtxj3YeRIb8VC5LmM1J+Ve3uHdteC1U= +github.com/yalue/onnxruntime_go v1.27.0/go.mod h1:b4X26A8pekNb1ACJ58wAXgNKeUCGEAQ9dmACut9Sm/4= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0= github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= @@ -64,8 +66,8 @@ golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8= golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A= -golang.org/x/image v0.35.0 h1:LKjiHdgMtO8z7Fh18nGY6KDcoEtVfsgLDPeLyguqb7I= -golang.org/x/image v0.35.0/go.mod h1:MwPLTVgvxSASsxdLzKrl8BRFuyqMyGhLwmC+TO1Sybk= +golang.org/x/image v0.37.0 h1:ZiRjArKI8GwxZOoEtUfhrBtaCN+4b/7709dlT6SSnQA= +golang.org/x/image v0.37.0/go.mod 
h1:/3f6vaXC+6CEanU4KJxbcUZyEePbyKbaLoDOe4ehFYY= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -90,8 +92,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= -golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4= +golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -126,8 +128,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE= -golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8= +golang.org/x/text v0.35.0 h1:JOVx6vVDFokkpaq1AEptVzLTpDe9KGpj5tR4/X+ybL8= +golang.org/x/text v0.35.0/go.mod h1:khi/HExzZJ2pGnjenulevKNX1W67CUy0AsXcNubPGCA= golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= diff --git a/backend/live.go b/backend/live.go index c577c8b..add3410 100644 --- a/backend/live.go +++ b/backend/live.go @@ -77,14 +77,6 @@ func maybeBlockHLSOnPreview(w http.ResponseWriter, r *http.Request, basePath, fi w.Header().Set("Cache-Control", "no-store") w.Header().Set("X-Preview-HLS-Disabled", "1") - // Optionales Debug (hilft dir, den Auslöser zu finden) - fmt.Printf("[HLS-BLOCK] %s file=%q referer=%q ua=%q\n", - r.URL.String(), - file, - r.Referer(), - r.Header.Get("User-Agent"), - ) - http.Error(w, "HLS disabled on /api/preview; use /api/preview/live", http.StatusGone) // 410 return true } diff --git a/backend/main.go b/backend/main.go index e1831af..1533931 100644 --- a/backend/main.go +++ b/backend/main.go @@ -543,9 +543,6 @@ func startAdaptiveSemController(ctx context.Context) { genSem.SetMax(genSem.Max() + 1) thumbSem.SetMax(thumbSem.Max() + 1) } - - // optional Debug: - // fmt.Printf("CPU %.1f%% -> preview=%d thumb=%d gen=%d\n", usage, previewSem.Max(), thumbSem.Max(), genSem.Max()) } } }() diff --git a/backend/meta.go b/backend/meta.go index 13c91dd..5d5b1df 100644 --- a/backend/meta.go +++ b/backend/meta.go @@ -39,15 +39,33 @@ type videoMeta struct { VideoWidth int `json:"videoWidth,omitempty"` VideoHeight int `json:"videoHeight,omitempty"` FPS float64 `json:"fps,omitempty"` - Resolution string `json:"resolution,omitempty"` // z.B. 
"1920x1080" + Resolution string `json:"resolution,omitempty"` SourceURL string `json:"sourceUrl,omitempty"` PreviewClips []previewClip `json:"previewClips,omitempty"` PreviewSprite *previewSpriteMeta `json:"previewSprite,omitempty"` + AI *aiAnalysisMeta `json:"ai,omitempty"` UpdatedAtUnix int64 `json:"updatedAtUnix"` } +type aiSegmentMeta struct { + Label string `json:"label"` + StartSeconds float64 `json:"startSeconds"` + EndSeconds float64 `json:"endSeconds"` + DurationSeconds float64 `json:"durationSeconds"` + Score float64 `json:"score,omitempty"` + AutoSelected bool `json:"autoSelected,omitempty"` +} + +type aiAnalysisMeta struct { + Goal string `json:"goal,omitempty"` + Mode string `json:"mode,omitempty"` + Hits []analyzeHit `json:"hits,omitempty"` + Segments []aiSegmentMeta `json:"segments,omitempty"` + AnalyzedAtUnix int64 `json:"analyzedAtUnix,omitempty"` +} + // liest Meta (v2 ODER altes v1) und validiert gegen fi (Size/ModTime) func readVideoMeta(metaPath string, fi os.FileInfo) (dur float64, w int, h int, fps float64, ok bool) { b, err := os.ReadFile(metaPath) @@ -338,14 +356,15 @@ func writeVideoMeta(metaPath string, fi os.FileInfo, dur float64, w int, h int, SourceURL: strings.TrimSpace(sourceURL), UpdatedAtUnix: time.Now().Unix(), - // ✅ bestehende Preview-Daten behalten PreviewClips: nil, PreviewSprite: nil, + AI: nil, } if existing != nil { m.PreviewClips = existing.PreviewClips m.PreviewSprite = existing.PreviewSprite + m.AI = existing.AI } buf, err := json.Marshal(m) if err != nil { @@ -378,6 +397,10 @@ func writeVideoMetaWithPreviewClips(metaPath string, fi os.FileInfo, dur float64 UpdatedAtUnix: time.Now().Unix(), } + if existing != nil { + m.AI = existing.AI + } + // ✅ vorhandenes Sprite (inkl. 
stepSeconds) nicht wegwerfen if existing != nil && existing.PreviewSprite != nil { m.PreviewSprite = existing.PreviewSprite @@ -431,6 +454,10 @@ func writeVideoMetaWithPreviewClipsAndSprite( } } + if old, ok := readVideoMetaIfValid(metaPath, fi); ok && old != nil && old.AI != nil { + m.AI = old.AI + } + buf, err := json.Marshal(m) if err != nil { return err @@ -538,3 +565,137 @@ func sanitizeID(id string) (string, error) { } return id, nil } + +func writeVideoMetaAI( + metaPath string, + fi os.FileInfo, + dur float64, + w int, + h int, + fps float64, + sourceURL string, + ai *aiAnalysisMeta, +) error { + if strings.TrimSpace(metaPath) == "" || dur <= 0 { + return nil + } + + var existing *videoMeta + if old, ok := readVideoMetaIfValid(metaPath, fi); ok && old != nil { + existing = old + } + + m := videoMeta{ + Version: 2, + DurationSeconds: dur, + FileSize: fi.Size(), + FileModUnix: fi.ModTime().Unix(), + VideoWidth: w, + VideoHeight: h, + FPS: fps, + Resolution: formatResolution(w, h), + SourceURL: strings.TrimSpace(sourceURL), + UpdatedAtUnix: time.Now().Unix(), + + PreviewClips: nil, + PreviewSprite: nil, + AI: ai, + } + + if existing != nil { + m.PreviewClips = existing.PreviewClips + m.PreviewSprite = existing.PreviewSprite + if m.VideoWidth <= 0 { + m.VideoWidth = existing.VideoWidth + } + if m.VideoHeight <= 0 { + m.VideoHeight = existing.VideoHeight + } + if m.FPS <= 0 { + m.FPS = existing.FPS + } + if m.Resolution == "" { + m.Resolution = existing.Resolution + } + if m.SourceURL == "" { + m.SourceURL = existing.SourceURL + } + } + + buf, err := json.MarshalIndent(m, "", " ") + if err != nil { + return err + } + buf = append(buf, '\n') + return atomicWriteFile(metaPath, buf) +} + +func writeVideoAIForFile( + ctx context.Context, + fullPath string, + sourceURL string, + ai *aiAnalysisMeta, +) error { + fullPath = strings.TrimSpace(fullPath) + if fullPath == "" || ai == nil { + return nil + } + + fi, err := os.Stat(fullPath) + if err != nil || fi == nil || 
fi.IsDir() || fi.Size() <= 0 { + return fmt.Errorf("datei nicht gefunden") + } + + m, ok := ensureVideoMetaForFileBestEffort(ctx, fullPath, sourceURL) + if !ok || m == nil { + return fmt.Errorf("meta konnte nicht erzeugt werden") + } + + stem := strings.TrimSuffix(filepath.Base(fullPath), filepath.Ext(fullPath)) + assetID := stripHotPrefix(strings.TrimSpace(stem)) + if assetID == "" { + return fmt.Errorf("asset id fehlt") + } + + assetID, err = sanitizeID(assetID) + if err != nil || assetID == "" { + return fmt.Errorf("asset id ungültig: %w", err) + } + + metaPath, err := metaJSONPathForAssetID(assetID) + if err != nil { + return err + } + + return writeVideoMetaAI( + metaPath, + fi, + m.DurationSeconds, + m.VideoWidth, + m.VideoHeight, + m.FPS, + sourceURL, + ai, + ) +} + +func readVideoMetaAI(metaPath string) (map[string]any, bool) { + b, err := os.ReadFile(metaPath) + if err != nil || len(b) == 0 { + return nil, false + } + + var m map[string]any + dec := json.NewDecoder(strings.NewReader(string(b))) + dec.UseNumber() + if err := dec.Decode(&m); err != nil { + return nil, false + } + + ai, ok := m["ai"].(map[string]any) + if !ok || ai == nil { + return nil, false + } + + return ai, true +} diff --git a/backend/nsfw_assets.go b/backend/nsfw_assets.go new file mode 100644 index 0000000..e4978af --- /dev/null +++ b/backend/nsfw_assets.go @@ -0,0 +1,117 @@ +// backend\nsfw_assets.go + +package main + +import ( + "embed" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "runtime" + "strings" +) + +//go:embed all:assets/nsfw +var embeddedNSFWAssets embed.FS + +const nsfwRuntimeVersion = "nsfw-onnx-v1" + +func nsfwRuntimeBaseDir() (string, error) { + appName := "nsfwapp" + + if runtime.GOOS == "windows" { + base := strings.TrimSpace(os.Getenv("LOCALAPPDATA")) + if base == "" { + base = strings.TrimSpace(os.Getenv("TEMP")) + } + if base == "" { + return "", fmt.Errorf("LOCALAPPDATA/TEMP nicht gefunden") + } + return filepath.Join(base, appName, nsfwRuntimeVersion), 
nil + } + + base, err := os.UserCacheDir() + if err != nil { + return "", err + } + return filepath.Join(base, appName, nsfwRuntimeVersion), nil +} + +func ensureNSFWAssetsExtracted() (string, error) { + root, err := nsfwRuntimeBaseDir() + if err != nil { + return "", err + } + + marker := filepath.Join(root, ".extract-ok") + if fi, err := os.Stat(marker); err == nil && !fi.IsDir() { + return root, nil + } + + if _, err := fs.Stat(embeddedNSFWAssets, "assets/nsfw"); err != nil { + return "", fmt.Errorf("embedded assets/nsfw nicht gefunden: %w", err) + } + + if err := os.RemoveAll(root); err != nil { + return "", fmt.Errorf("runtime-ordner konnte nicht bereinigt werden: %w", err) + } + if err := os.MkdirAll(root, 0o755); err != nil { + return "", fmt.Errorf("runtime-ordner konnte nicht erstellt werden: %w", err) + } + + if err := extractEmbeddedDir("assets/nsfw", root); err != nil { + return "", fmt.Errorf("nsfw-assets extrahieren fehlgeschlagen: %w", err) + } + + if err := os.WriteFile(marker, []byte("ok"), 0o644); err != nil { + return "", fmt.Errorf("markerdatei konnte nicht geschrieben werden: %w", err) + } + + return root, nil +} + +func extractEmbeddedDir(srcRoot, dstRoot string) error { + return fs.WalkDir(embeddedNSFWAssets, srcRoot, func(p string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + + rel, err := filepath.Rel(srcRoot, p) + if err != nil { + return err + } + if rel == "." 
{ + return os.MkdirAll(dstRoot, 0o755) + } + + dstPath := filepath.Join(dstRoot, rel) + + if d.IsDir() { + return os.MkdirAll(dstPath, 0o755) + } + + if err := os.MkdirAll(filepath.Dir(dstPath), 0o755); err != nil { + return err + } + + in, err := embeddedNSFWAssets.Open(p) + if err != nil { + return err + } + defer in.Close() + + out, err := os.OpenFile(dstPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0o755) + if err != nil { + return err + } + + _, copyErr := io.Copy(out, in) + closeErr := out.Close() + if copyErr != nil { + return copyErr + } + return closeErr + }) +} diff --git a/backend/nsfw_detector.go b/backend/nsfw_detector.go new file mode 100644 index 0000000..1f01681 --- /dev/null +++ b/backend/nsfw_detector.go @@ -0,0 +1,409 @@ +// backend\nsfw_detector.go + +package main + +import ( + "bytes" + "encoding/base64" + "fmt" + "image" + _ "image/jpeg" + _ "image/png" + "math" + "os" + "path/filepath" + "sort" + "strings" + "sync" + + ort "github.com/yalue/onnxruntime_go" + xdraw "golang.org/x/image/draw" +) + +const ( + nsfwInputSize = 320 + nsfwNumClasses = 18 + nsfwNumAnchors = 2100 // 320er YOLOv8: 40*40 + 20*20 + 10*10 + nsfwConfThresh = 0.20 + nsfwNMSThresh = 0.45 +) + +var nsfwLabels = []string{ + "female_genitalia_covered", + "face_female", + "buttocks_exposed", + "female_breast_exposed", + "female_genitalia_exposed", + "male_breast_exposed", + "anus_exposed", + "feet_exposed", + "belly_covered", + "feet_covered", + "armpits_covered", + "armpits_exposed", + "face_male", + "belly_exposed", + "male_genitalia_exposed", + "anus_covered", + "female_breast_covered", + "buttocks_covered", +} + +type nsfwDetector struct { + mu sync.Mutex + initialized bool + runtimeRoot string + modelPath string + dllPath string + inputTensor *ort.Tensor[float32] + outputTensor *ort.Tensor[float32] + session *ort.AdvancedSession +} + +type yoloDet struct { + classID int + score float32 + x1 float32 + y1 float32 + x2 float32 + y2 float32 +} + +var globalNSFW nsfwDetector + +func 
initNSFWDetector() error { + globalNSFW.mu.Lock() + defer globalNSFW.mu.Unlock() + + if globalNSFW.initialized { + return nil + } + + root, err := ensureNSFWAssetsExtracted() + if err != nil { + return err + } + + dllPath := filepath.Join(root, "onnxruntime.dll") + modelPath := filepath.Join(root, "320n.onnx") + + if _, err := os.Stat(dllPath); err != nil { + return fmt.Errorf("onnxruntime.dll nicht gefunden: %w", err) + } + if _, err := os.Stat(modelPath); err != nil { + return fmt.Errorf("320n.onnx nicht gefunden: %w", err) + } + + ort.SetSharedLibraryPath(dllPath) + if err := ort.InitializeEnvironment(); err != nil { + return fmt.Errorf("onnxruntime init fehlgeschlagen: %w", err) + } + + inputShape := ort.NewShape(1, 3, nsfwInputSize, nsfwInputSize) + inputData := make([]float32, 1*3*nsfwInputSize*nsfwInputSize) + inputTensor, err := ort.NewTensor(inputShape, inputData) + if err != nil { + ort.DestroyEnvironment() + return fmt.Errorf("input tensor fehlgeschlagen: %w", err) + } + + outputShape := ort.NewShape(1, 4+nsfwNumClasses, nsfwNumAnchors) + outputTensor, err := ort.NewEmptyTensor[float32](outputShape) + if err != nil { + inputTensor.Destroy() + ort.DestroyEnvironment() + return fmt.Errorf("output tensor fehlgeschlagen: %w", err) + } + + session, err := ort.NewAdvancedSession( + modelPath, + []string{"images"}, + []string{"output0"}, + []ort.Value{inputTensor}, + []ort.Value{outputTensor}, + nil, + ) + if err != nil { + outputTensor.Destroy() + inputTensor.Destroy() + ort.DestroyEnvironment() + return fmt.Errorf("onnx session fehlgeschlagen: %w", err) + } + + globalNSFW.runtimeRoot = root + globalNSFW.modelPath = modelPath + globalNSFW.dllPath = dllPath + globalNSFW.inputTensor = inputTensor + globalNSFW.outputTensor = outputTensor + globalNSFW.session = session + globalNSFW.initialized = true + + fmt.Println("[NSFW] ONNX detector bereit") + fmt.Println("[NSFW] model:", modelPath) + fmt.Println("[NSFW] dll:", dllPath) + + return nil +} + +func 
closeNSFWDetector() error { + globalNSFW.mu.Lock() + defer globalNSFW.mu.Unlock() + + if !globalNSFW.initialized { + return nil + } + + if globalNSFW.session != nil { + globalNSFW.session.Destroy() + globalNSFW.session = nil + } + if globalNSFW.outputTensor != nil { + globalNSFW.outputTensor.Destroy() + globalNSFW.outputTensor = nil + } + if globalNSFW.inputTensor != nil { + globalNSFW.inputTensor.Destroy() + globalNSFW.inputTensor = nil + } + + ort.DestroyEnvironment() + globalNSFW.initialized = false + + return nil +} + +func detectNSFWFromBase64(imageB64 string) ([]NsfwFrameResult, error) { + globalNSFW.mu.Lock() + defer globalNSFW.mu.Unlock() + + if !globalNSFW.initialized || globalNSFW.session == nil { + return nil, fmt.Errorf("nsfw detector nicht initialisiert") + } + + img, err := decodeBase64Image(imageB64) + if err != nil { + return nil, err + } + + fillInputTensor(globalNSFW.inputTensor.GetData(), img) + + if err := globalNSFW.session.Run(); err != nil { + return nil, fmt.Errorf("onnx run fehlgeschlagen: %w", err) + } + + raw := globalNSFW.outputTensor.GetData() + dets := parseYOLOOutput(raw, nsfwConfThresh) + dets = applyNMS(dets, nsfwNMSThresh) + + bestByLabel := map[string]float64{} + for _, d := range dets { + if d.classID < 0 || d.classID >= len(nsfwLabels) { + continue + } + label := nsfwLabels[d.classID] + score := float64(d.score) + if score > bestByLabel[label] { + bestByLabel[label] = score + } + } + + out := make([]NsfwFrameResult, 0, len(bestByLabel)) + for label, score := range bestByLabel { + out = append(out, NsfwFrameResult{ + Label: label, + Score: score, + }) + } + + sort.Slice(out, func(i, j int) bool { + return out[i].Score > out[j].Score + }) + + return out, nil +} + +func decodeBase64Image(imageB64 string) (image.Image, error) { + raw, err := base64.StdEncoding.DecodeString(strings.TrimSpace(imageB64)) + if err != nil { + return nil, fmt.Errorf("base64 decode fehlgeschlagen: %w", err) + } + img, _, err := 
image.Decode(bytes.NewReader(raw)) + if err != nil { + return nil, fmt.Errorf("bild decode fehlgeschlagen: %w", err) + } + return img, nil +} + +func fillInputTensor(dst []float32, src image.Image) { + rgba, scale, padX, padY := letterboxToRGBA(src, nsfwInputSize, nsfwInputSize) + + hw := nsfwInputSize * nsfwInputSize + for y := 0; y < nsfwInputSize; y++ { + for x := 0; x < nsfwInputSize; x++ { + i := y*rgba.Stride + x*4 + r := float32(rgba.Pix[i+0]) / 255.0 + g := float32(rgba.Pix[i+1]) / 255.0 + b := float32(rgba.Pix[i+2]) / 255.0 + + idx := y*nsfwInputSize + x + dst[idx] = r + dst[hw+idx] = g + dst[2*hw+idx] = b + } + } + + _ = scale + _ = padX + _ = padY +} + +func letterboxToRGBA(src image.Image, dstW, dstH int) (*image.RGBA, float64, int, int) { + sb := src.Bounds() + sw := sb.Dx() + sh := sb.Dy() + + scale := math.Min(float64(dstW)/float64(sw), float64(dstH)/float64(sh)) + nw := int(math.Round(float64(sw) * scale)) + nh := int(math.Round(float64(sh) * scale)) + + dst := image.NewRGBA(image.Rect(0, 0, dstW, dstH)) + + for y := 0; y < dstH; y++ { + for x := 0; x < dstW; x++ { + i := y*dst.Stride + x*4 + dst.Pix[i+0] = 114 + dst.Pix[i+1] = 114 + dst.Pix[i+2] = 114 + dst.Pix[i+3] = 255 + } + } + + resized := image.NewRGBA(image.Rect(0, 0, nw, nh)) + xdraw.ApproxBiLinear.Scale(resized, resized.Bounds(), src, sb, xdraw.Over, nil) + + padX := (dstW - nw) / 2 + padY := (dstH - nh) / 2 + + for y := 0; y < nh; y++ { + copy( + dst.Pix[(y+padY)*dst.Stride+padX*4:(y+padY)*dst.Stride+padX*4+nw*4], + resized.Pix[y*resized.Stride:y*resized.Stride+nw*4], + ) + } + + return dst, scale, padX, padY +} + +func parseYOLOOutput(raw []float32, confThresh float32) []yoloDet { + // output0: [1, 22, 2100] = [batch, 4+18, anchors] + out := make([]yoloDet, 0, 64) + channels := 4 + nsfwNumClasses + if len(raw) != channels*nsfwNumAnchors { + return out + } + + for a := 0; a < nsfwNumAnchors; a++ { + cx := raw[0*nsfwNumAnchors+a] + cy := raw[1*nsfwNumAnchors+a] + w := 
raw[2*nsfwNumAnchors+a] + h := raw[3*nsfwNumAnchors+a] + + bestClass := -1 + bestScore := float32(0) + + for c := 0; c < nsfwNumClasses; c++ { + s := raw[(4+c)*nsfwNumAnchors+a] + if s > bestScore { + bestScore = s + bestClass = c + } + } + + if bestClass < 0 || bestScore < confThresh { + continue + } + + x1 := cx - w/2 + y1 := cy - h/2 + x2 := cx + w/2 + y2 := cy + h/2 + + out = append(out, yoloDet{ + classID: bestClass, + score: bestScore, + x1: x1, + y1: y1, + x2: x2, + y2: y2, + }) + } + + return out +} + +func applyNMS(dets []yoloDet, iouThresh float32) []yoloDet { + if len(dets) == 0 { + return dets + } + + sort.Slice(dets, func(i, j int) bool { + return dets[i].score > dets[j].score + }) + + kept := make([]yoloDet, 0, len(dets)) + used := make([]bool, len(dets)) + + for i := 0; i < len(dets); i++ { + if used[i] { + continue + } + kept = append(kept, dets[i]) + + for j := i + 1; j < len(dets); j++ { + if used[j] || dets[i].classID != dets[j].classID { + continue + } + if iou(dets[i], dets[j]) >= iouThresh { + used[j] = true + } + } + } + + return kept +} + +func iou(a, b yoloDet) float32 { + ix1 := maxf(a.x1, b.x1) + iy1 := maxf(a.y1, b.y1) + ix2 := minf(a.x2, b.x2) + iy2 := minf(a.y2, b.y2) + + iw := maxf(0, ix2-ix1) + ih := maxf(0, iy2-iy1) + inter := iw * ih + + aw := maxf(0, a.x2-a.x1) + ah := maxf(0, a.y2-a.y1) + bw := maxf(0, b.x2-b.x1) + bh := maxf(0, b.y2-b.y1) + + union := aw*ah + bw*bh - inter + if union <= 0 { + return 0 + } + return inter / union +} + +func minf(a, b float32) float32 { + if a < b { + return a + } + return b +} + +func maxf(a, b float32) float32 { + if a > b { + return a + } + return b +} diff --git a/backend/nsfw_types.go b/backend/nsfw_types.go new file mode 100644 index 0000000..2452134 --- /dev/null +++ b/backend/nsfw_types.go @@ -0,0 +1,14 @@ +// backend\nsfw_types.go + +package main + +type NsfwFrameResult struct { + Label string `json:"label"` + Score float64 `json:"score"` +} + +type NsfwImageResponse struct { + Ok bool 
`json:"ok"` + Results []NsfwFrameResult `json:"results"` + Error string `json:"error,omitempty"` +} diff --git a/backend/postwork.go b/backend/postwork.go index dae0910..162123f 100644 --- a/backend/postwork.go +++ b/backend/postwork.go @@ -144,10 +144,7 @@ func (pq *PostWorkQueue) workerLoop(id int) { continue } - // 1) Heavy-Gate: erst wenn ein Slot frei ist, gilt der Task als "running" - pq.ffmpegSem <- struct{}{} - - // 2) Ab hier startet er wirklich → waiting -> running + // Task startet jetzt wirklich → waiting -> running pq.mu.Lock() pq.removeWaitingKeyLocked(task.Key) pq.runningKeys[task.Key] = struct{}{} @@ -170,9 +167,6 @@ func (pq *PostWorkQueue) workerLoop(id int) { pq.queued-- } pq.mu.Unlock() - - // Slot freigeben - <-pq.ffmpegSem }() // 3) Optional: Task timeout (gegen hängende ffmpeg) @@ -253,7 +247,7 @@ func (pq *PostWorkQueue) StatusForKey(key string) PostWorkKeyStatus { } // global (oder in deinem app struct halten) -var postWorkQ = NewPostWorkQueue(512, 2) // maxParallelFFmpeg = 4 +var postWorkQ = NewPostWorkQueue(512, 6) // maxParallelFFmpeg = 6 // --- Status Refresher (ehemals postwork_refresh.go) --- diff --git a/backend/preview.go b/backend/preview.go index 19adfe8..29bad97 100644 --- a/backend/preview.go +++ b/backend/preview.go @@ -1746,18 +1746,12 @@ func servePreviewForFinishedFile(w http.ResponseWriter, r *http.Request, id stri if tStr := strings.TrimSpace(r.URL.Query().Get("t")); tStr != "" { if sec, err := strconv.ParseFloat(tStr, 64); err == nil && sec >= 0 { - secI := int64(sec + 0.5) - if secI < 0 { - secI = 0 + if sec < 0 { + sec = 0 } - framePath := filepath.Join(assetDir, fmt.Sprintf("t_%d.webp", secI)) - if fi, err := os.Stat(framePath); err == nil && !fi.IsDir() && fi.Size() > 0 { - servePreviewWebPFile(w, r, framePath) - return - } - img, err := extractFrameAtTimeWebP(outPath, float64(secI)) + + img, err := extractFrameAtTimeWebP(outPath, sec) if err == nil && len(img) > 0 { - _ = atomicWriteFile(framePath, img) 
servePreviewWebPBytes(w, img) return } diff --git a/backend/record.go b/backend/record.go index bdd22d4..4adb6e7 100644 --- a/backend/record.go +++ b/backend/record.go @@ -22,6 +22,18 @@ import ( // ---------------- Types ---------------- +type prepareSplitReq struct { + Output string `json:"output"` + Goal string `json:"goal,omitempty"` // z.B. "nsfw" +} + +type prepareSplitResp struct { + OK bool `json:"ok"` + AssetsReady bool `json:"assetsReady"` + AnalyzeReady bool `json:"analyzeReady"` + Error string `json:"error,omitempty"` +} + type RecordRequest struct { URL string `json:"url"` Cookie string `json:"cookie,omitempty"` @@ -54,6 +66,7 @@ type doneMetaFileResp struct { FPS float64 `json:"fps,omitempty"` SourceURL string `json:"sourceUrl,omitempty"` PreviewSprite previewSpriteMetaResp `json:"previewSprite"` + AI any `json:"ai,omitempty"` Error string `json:"error,omitempty"` } @@ -115,6 +128,38 @@ func mustMethod(w http.ResponseWriter, r *http.Request, methods ...string) bool return false } +func recordPrepareSplit(w http.ResponseWriter, r *http.Request) { + if !mustMethod(w, r, http.MethodPost) { + return + } + + var req prepareSplitReq + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "ungültiger body: "+err.Error(), http.StatusBadRequest) + return + } + + ctx, cancel := context.WithTimeout(r.Context(), 90*time.Second) + defer cancel() + + res, err := prepareVideoForSplit(ctx, req.Output, "", req.Goal) + if err != nil { + respondJSON(w, prepareSplitResp{ + OK: false, + AssetsReady: false, + AnalyzeReady: false, + Error: err.Error(), + }) + return + } + + respondJSON(w, prepareSplitResp{ + OK: true, + AssetsReady: res.AssetsReady, + AnalyzeReady: res.AnalyzeReady, + }) +} + // ---------------- Preview sprite truth (shared) ---------------- type previewSpriteMetaFileInfo struct { @@ -1222,6 +1267,7 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { if mp, merr := generatedMetaFile(id); merr == nil && 
strings.TrimSpace(mp) != "" { if mfi, serr := os.Stat(mp); serr == nil && mfi != nil && !mfi.IsDir() && mfi.Size() > 0 { resp.MetaExists = true + if dur, w2, h2, fps2, ok := readVideoMeta(mp, fi); ok { resp.DurationSeconds = dur resp.Width = w2 @@ -1231,6 +1277,10 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { if u, ok := readVideoMetaSourceURL(mp, fi); ok { resp.SourceURL = u } + + if ai, ok := readVideoMetaAI(mp); ok { + resp.AI = ai + } } } } @@ -1298,21 +1348,61 @@ func recordDoneMeta(w http.ResponseWriter, r *http.Request) { sortedAll := doneCache.sortedIdx doneCache.mu.Unlock() + isActivePostworkOutput := func(fullPath string) bool { + base := strings.TrimSpace(filepath.Base(fullPath)) + if base == "" { + return false + } + + jobsMu.Lock() + defer jobsMu.Unlock() + + for _, j := range jobs { + if j == nil { + continue + } + + if !isPostworkJob(j) { + continue + } + if isTerminalJobStatus(j.Status) { + continue + } + + if strings.EqualFold(filepath.Base(strings.TrimSpace(j.Output)), base) { + return true + } + } + + return false + } + count := 0 if qModel == "" { incKey := "0" if includeKeep { incKey = "1" } - count = len(sortedAll[incKey+"|completed_desc"]) + + for _, idx := range sortedAll[incKey+"|completed_desc"] { + it := items[idx] + if isActivePostworkOutput(it.job.Output) { + continue + } + count++ + } } else { for _, it := range items { if !includeKeep && it.fromKeep { continue } - if it.modelKey == qModel { - count++ + if it.modelKey != qModel { + continue } + if isActivePostworkOutput(it.job.Output) { + continue + } + count++ } } @@ -1531,7 +1621,48 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { }) } - totalCount := len(idx) + isActivePostworkOutput := func(fullPath string) bool { + base := strings.TrimSpace(filepath.Base(fullPath)) + if base == "" { + return false + } + + jobsMu.Lock() + defer jobsMu.Unlock() + + for _, j := range jobs { + if j == nil { + continue + } + + if !isPostworkJob(j) { + continue + } + 
if isTerminalJobStatus(j.Status) { + continue + } + + if strings.EqualFold(filepath.Base(strings.TrimSpace(j.Output)), base) { + return true + } + } + + return false + } + + filteredIdx := make([]int, 0, len(idx)) + for _, ii := range idx { + it := items[ii] + if it.job == nil { + continue + } + if isActivePostworkOutput(it.job.Output) { + continue + } + filteredIdx = append(filteredIdx, ii) + } + + totalCount := len(filteredIdx) start := 0 end := totalCount @@ -1554,7 +1685,7 @@ func recordDoneList(w http.ResponseWriter, r *http.Request) { out := make([]*RecordJob, 0, max(0, end-start)) - for _, ii := range idx[start:end] { + for _, ii := range filteredIdx[start:end] { base := items[ii].job if base == nil { continue diff --git a/backend/record_paths.go b/backend/record_paths.go index a3655ac..1940458 100644 --- a/backend/record_paths.go +++ b/backend/record_paths.go @@ -85,6 +85,20 @@ func setNoStoreHeaders(w http.ResponseWriter) { // ---------- Resolve dirs ---------- +func exeDir() (string, error) { + exePath, err := os.Executable() + if err != nil { + return "", err + } + + exePath, err = filepath.Abs(exePath) + if err != nil { + return "", err + } + + return filepath.Dir(exePath), nil +} + func resolvePathRelativeToApp(p string) (string, error) { p = strings.TrimSpace(p) if p == "" { @@ -96,10 +110,9 @@ func resolvePathRelativeToApp(p string) (string, error) { return p, nil } - exe, err := os.Executable() + baseDir, err := exeDir() if err == nil { - exeDir := filepath.Dir(exe) - low := strings.ToLower(exeDir) + low := strings.ToLower(baseDir) // Heuristik: go run / tests -> exe liegt in Temp/go-build isTemp := strings.Contains(low, `\appdata\local\temp`) || @@ -110,7 +123,7 @@ func resolvePathRelativeToApp(p string) (string, error) { strings.Contains(low, `/go-build`) if !isTemp { - return filepath.Join(exeDir, p), nil + return filepath.Join(baseDir, p), nil } } diff --git a/backend/recorder.go b/backend/recorder.go index 84b558d..2364934 100644 --- 
a/backend/recorder.go +++ b/backend/recorder.go @@ -37,13 +37,15 @@ func setJobProgress(job *RecordJob, phase string, pct int) { case "postwork": return rng{0, 8} case "remuxing": - return rng{8, 42} + return rng{8, 38} case "moving": - return rng{42, 58} + return rng{38, 54} case "probe": - return rng{58, 72} + return rng{54, 70} case "assets": - return rng{72, 99} + return rng{70, 88} + case "analyze": + return rng{88, 99} default: return rng{0, 100} } @@ -448,7 +450,8 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { job.PostWorkKey = "" job.PostWork = nil jobsMu.Unlock() - publishJobUpsert(job) + + publishJobRemove(job) notifyDoneChanged() return } @@ -696,6 +699,37 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { } setPhase("assets", 100) + // 6) AI Analyze -> meta.json.ai + setPhase("analyze", 5) + { + actx, cancel := context.WithTimeout(ctx, 45*time.Second) + + durationSec, _ := durationSecondsForAnalyze(actx, out) + hits, aerr := analyzeVideoFromSprite(actx, out, "nsfw") + if aerr != nil { + fmt.Println("⚠️ postwork analyze:", aerr) + } else { + setPhase("analyze", 65) + + segments := buildSegmentsFromAnalyzeHits(hits, durationSec) + + ai := &aiAnalysisMeta{ + Goal: "nsfw", + Mode: "sprite", + Hits: hits, + Segments: segments, + AnalyzedAtUnix: time.Now().Unix(), + } + + if werr := writeVideoAIForFile(actx, out, job.SourceURL, ai); werr != nil { + fmt.Println("⚠️ writeVideoAIForFile:", werr) + } + } + + cancel() + } + setPhase("analyze", 100) + // Finalize jobsMu.Lock() job.Status = postTarget @@ -704,7 +738,8 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { job.PostWorkKey = "" job.PostWork = nil jobsMu.Unlock() - publishJobUpsert(job) + + publishJobRemove(job) notifyDoneChanged() return nil }, @@ -724,7 +759,8 @@ func runJob(ctx context.Context, job *RecordJob, req RecordRequest) { job.PostWorkKey = "" job.PostWork = nil jobsMu.Unlock() - publishJobUpsert(job) + + publishJobRemove(job) 
notifyDoneChanged() } } diff --git a/backend/recorder_settings.json b/backend/recorder_settings.json index 56093d1..118926d 100644 --- a/backend/recorder_settings.json +++ b/backend/recorder_settings.json @@ -10,6 +10,7 @@ "useMyFreeCamsWatcher": true, "autoDeleteSmallDownloads": false, "autoDeleteSmallDownloadsBelowMB": 50, + "lowDiskPauseBelowGB": 5, "blurPreviews": false, "teaserPlayback": "hover", "teaserAudio": false, diff --git a/backend/routes.go b/backend/routes.go index 1839ad7..adb9e63 100644 --- a/backend/routes.go +++ b/backend/routes.go @@ -54,12 +54,15 @@ func registerRoutes(mux *http.ServeMux, auth *AuthManager) *ModelStore { api.HandleFunc("/api/record/list", recordList) api.HandleFunc("/api/record/done/meta", recordDoneMeta) api.HandleFunc("/api/record/video", recordVideo) + api.HandleFunc("/api/record/split", recordSplitVideo) + api.HandleFunc("/api/record/analyze", recordAnalyzeVideo) api.HandleFunc("/api/record/done", recordDoneList) api.HandleFunc("/api/record/delete", recordDeleteVideo) api.HandleFunc("/api/record/toggle-hot", recordToggleHot) api.HandleFunc("/api/record/keep", recordKeepVideo) api.HandleFunc("/api/record/unkeep", recordUnkeepVideo) api.HandleFunc("/api/record/restore", recordRestoreVideo) + api.HandleFunc("/api/record/prepare-split", recordPrepareSplit) api.HandleFunc("/api/chaturbate/online", chaturbateOnlineHandler) api.HandleFunc("/api/chaturbate/biocontext", chaturbateBioContextHandler) diff --git a/backend/server.go b/backend/server.go index 8b6ebcd..d95d335 100644 --- a/backend/server.go +++ b/backend/server.go @@ -3,9 +3,13 @@ package main import ( + "context" "fmt" "net/http" "os" + "os/signal" + "syscall" + "time" ) // --- main --- @@ -18,16 +22,21 @@ func main() { go startGeneratedGarbageCollector() + // ✅ NSFW-ONNX Detector initialisieren + if err := initNSFWDetector(); err != nil { + fmt.Println("❌ NSFW-ONNX Fehler:", err) + os.Exit(1) + } + defer func() { + _ = closeNSFWDetector() + }() + mux := 
http.NewServeMux() - // ✅ AuthManager erstellen (Beispiel) - // Du brauchst hier typischerweise: - // - ein Secret/Key (Cookie signen / Sessions) - // - Username+Pass Hash oder config - // - optional 2FA store auth, err := NewAuthManager() if err != nil { fmt.Println("❌ auth init:", err) + _ = closeNSFWDetector() os.Exit(1) } @@ -45,8 +54,31 @@ func main() { fmt.Println("🌐 HTTP-API aktiv: http://localhost:9999") handler := withCORS(mux) - if err := http.ListenAndServe(":9999", handler); err != nil { + srv := &http.Server{ + Addr: ":9999", + Handler: handler, + } + + // Shutdown-Signale + stopSig := make(chan os.Signal, 1) + signal.Notify(stopSig, os.Interrupt, syscall.SIGTERM) + + go func() { + <-stopSig + fmt.Println("🛑 Beende Server...") + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + _ = srv.Shutdown(ctx) + _ = closeNSFWDetector() + }() + + if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed { fmt.Println("❌ HTTP-Server Fehler:", err) + _ = closeNSFWDetector() os.Exit(1) } + + _ = closeNSFWDetector() } diff --git a/backend/settings.go b/backend/settings.go index cd2798b..c980a5b 100644 --- a/backend/settings.go +++ b/backend/settings.go @@ -30,6 +30,7 @@ type RecorderSettings struct { // Wenn aktiv, werden fertige Downloads automatisch gelöscht, wenn sie kleiner als der Grenzwert sind. 
AutoDeleteSmallDownloads bool `json:"autoDeleteSmallDownloads"` AutoDeleteSmallDownloadsBelowMB int `json:"autoDeleteSmallDownloadsBelowMB"` + LowDiskPauseBelowGB int `json:"lowDiskPauseBelowGB"` BlurPreviews bool `json:"blurPreviews"` TeaserPlayback string `json:"teaserPlayback"` // still | hover | all @@ -58,6 +59,7 @@ var ( UseMyFreeCamsWatcher: false, AutoDeleteSmallDownloads: false, AutoDeleteSmallDownloadsBelowMB: 50, + LowDiskPauseBelowGB: 5, BlurPreviews: false, TeaserPlayback: "hover", @@ -119,6 +121,12 @@ func loadSettings() { if s.AutoDeleteSmallDownloadsBelowMB > 100_000 { s.AutoDeleteSmallDownloadsBelowMB = 100_000 } + if s.LowDiskPauseBelowGB < 1 { + s.LowDiskPauseBelowGB = 1 + } + if s.LowDiskPauseBelowGB > 10_000 { + s.LowDiskPauseBelowGB = 10_000 + } settingsMu.Lock() settings = s @@ -205,6 +213,7 @@ type RecorderSettingsPublic struct { AutoDeleteSmallDownloads bool `json:"autoDeleteSmallDownloads"` AutoDeleteSmallDownloadsBelowMB int `json:"autoDeleteSmallDownloadsBelowMB"` + LowDiskPauseBelowGB int `json:"lowDiskPauseBelowGB"` BlurPreviews bool `json:"blurPreviews"` TeaserPlayback string `json:"teaserPlayback"` @@ -230,6 +239,7 @@ func toPublicSettings(s RecorderSettings) RecorderSettingsPublic { AutoDeleteSmallDownloads: s.AutoDeleteSmallDownloads, AutoDeleteSmallDownloadsBelowMB: s.AutoDeleteSmallDownloadsBelowMB, + LowDiskPauseBelowGB: s.LowDiskPauseBelowGB, BlurPreviews: s.BlurPreviews, TeaserPlayback: s.TeaserPlayback, @@ -294,6 +304,12 @@ func recordSettingsHandler(w http.ResponseWriter, r *http.Request) { if in.AutoDeleteSmallDownloadsBelowMB > 100_000 { in.AutoDeleteSmallDownloadsBelowMB = 100_000 } + if in.LowDiskPauseBelowGB < 1 { + in.LowDiskPauseBelowGB = 1 + } + if in.LowDiskPauseBelowGB > 10_000 { + in.LowDiskPauseBelowGB = 10_000 + } // --- ensure folders (Fehler zurückgeben, falls z.B. 
keine Rechte) --- recAbs, err := resolvePathRelativeToApp(in.RecordDir) diff --git a/backend/split.go b/backend/split.go new file mode 100644 index 0000000..f991eda --- /dev/null +++ b/backend/split.go @@ -0,0 +1,297 @@ +// backend\split.go + +package main + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "os" + "os/exec" + "path/filepath" + "sort" + "strconv" + "strings" + "time" +) + +type splitVideoRequest struct { + File string `json:"file"` // z. B. "model_01_01_2026__12-00-00.mp4" + Splits []float64 `json:"splits"` // Sekunden, z. B. [120.5, 300.0] +} + +type splitVideoSegmentResponse struct { + Index int `json:"index"` + Start float64 `json:"start"` + End float64 `json:"end"` + Duration float64 `json:"duration"` + File string `json:"file"` + Path string `json:"path"` +} + +type splitVideoResponse struct { + OK bool `json:"ok"` + File string `json:"file"` + Source string `json:"source"` + Segments []splitVideoSegmentResponse `json:"segments"` +} + +func recordSplitVideo(w http.ResponseWriter, r *http.Request) { + if !mustMethod(w, r, http.MethodPost) { + return + } + + var req splitVideoRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, "ungültiger JSON-Body: "+err.Error(), http.StatusBadRequest) + return + } + + req.File = strings.TrimSpace(req.File) + if req.File == "" { + http.Error(w, "file fehlt", http.StatusBadRequest) + return + } + if !isAllowedVideoExt(req.File) { + http.Error(w, "nur .mp4 oder .ts erlaubt", http.StatusBadRequest) + return + } + + s := getSettings() + doneAbs, err := resolvePathRelativeToApp(s.DoneDir) + if err != nil { + http.Error(w, "doneDir auflösung fehlgeschlagen: "+err.Error(), http.StatusInternalServerError) + return + } + if strings.TrimSpace(doneAbs) == "" { + http.Error(w, "doneDir ist leer", http.StatusBadRequest) + return + } + + srcPath, _, fi, err := resolveDoneFileByName(doneAbs, req.File) + if err != nil { + http.Error(w, "quelldatei nicht gefunden", 
http.StatusNotFound) + return + } + if fi == nil || fi.IsDir() || fi.Size() <= 0 { + http.Error(w, "quelldatei ungültig", http.StatusBadRequest) + return + } + + srcPath = filepath.Clean(srcPath) + + ctx, cancel := context.WithTimeout(r.Context(), 20*time.Second) + defer cancel() + + durationSec, err := durationSecondsCached(ctx, srcPath) + if err != nil || durationSec <= 0 { + http.Error(w, "videodauer konnte nicht ermittelt werden", http.StatusInternalServerError) + return + } + + points, err := normalizeSplitPoints(req.Splits, durationSec) + if err != nil { + http.Error(w, "splits ungültig: "+err.Error(), http.StatusBadRequest) + return + } + + if len(points) == 0 { + http.Error(w, "keine gültigen splits übergeben", http.StatusBadRequest) + return + } + + segments := buildSplitSegments(points, durationSec) + if len(segments) < 2 { + http.Error(w, "zu wenige segmente nach split-berechnung", http.StatusBadRequest) + return + } + + outDir := filepath.Join(filepath.Dir(srcPath), "_split") + if err := os.MkdirAll(outDir, 0o755); err != nil { + http.Error(w, "zielordner konnte nicht erstellt werden: "+err.Error(), http.StatusInternalServerError) + return + } + + base := strings.TrimSuffix(filepath.Base(srcPath), filepath.Ext(srcPath)) + ext := strings.ToLower(filepath.Ext(srcPath)) + if ext == "" { + ext = ".mp4" + } + + resp := splitVideoResponse{ + OK: true, + File: req.File, + Source: srcPath, + } + + for i, seg := range segments { + outName := fmt.Sprintf("%s__part_%02d%s", base, i+1, ext) + outPath := filepath.Join(outDir, outName) + + if err := splitSingleSegment(r.Context(), srcPath, outPath, seg.Start, seg.Duration); err != nil { + http.Error( + w, + fmt.Sprintf("segment %d konnte nicht erzeugt werden: %v", i+1, err), + http.StatusInternalServerError, + ) + return + } + + resp.Segments = append(resp.Segments, splitVideoSegmentResponse{ + Index: i + 1, + Start: seg.Start, + End: seg.End, + Duration: seg.Duration, + File: outName, + Path: outPath, + }) + } + + 
notifyDoneChanged() + respondJSON(w, resp) +} + +type normalizedSegment struct { + Start float64 + End float64 + Duration float64 +} + +func normalizeSplitPoints(raw []float64, duration float64) ([]float64, error) { + if duration <= 0 { + return nil, fmt.Errorf("duration <= 0") + } + + out := make([]float64, 0, len(raw)) + for _, v := range raw { + if v <= 0 { + continue + } + if v >= duration { + continue + } + out = append(out, v) + } + + if len(out) == 0 { + return nil, fmt.Errorf("alle split-punkte liegen außerhalb der videodauer") + } + + sort.Float64s(out) + + dedup := make([]float64, 0, len(out)) + for _, v := range out { + if len(dedup) == 0 || absFloat(dedup[len(dedup)-1]-v) >= 0.20 { + dedup = append(dedup, v) + } + } + + if len(dedup) == 0 { + return nil, fmt.Errorf("keine eindeutigen split-punkte übrig") + } + + return dedup, nil +} + +func buildSplitSegments(points []float64, duration float64) []normalizedSegment { + all := make([]float64, 0, len(points)+2) + all = append(all, 0) + all = append(all, points...) 
+ all = append(all, duration) + + out := make([]normalizedSegment, 0, len(all)-1) + for i := 0; i < len(all)-1; i++ { + start := all[i] + end := all[i+1] + dur := end - start + if dur <= 0.10 { + continue + } + out = append(out, normalizedSegment{ + Start: start, + End: end, + Duration: dur, + }) + } + return out +} + +func splitSingleSegment(parentCtx context.Context, srcPath, outPath string, startSec, durSec float64) error { + if strings.TrimSpace(srcPath) == "" { + return fmt.Errorf("srcPath leer") + } + if strings.TrimSpace(outPath) == "" { + return fmt.Errorf("outPath leer") + } + if durSec <= 0 { + return fmt.Errorf("dauer <= 0") + } + + tmpPath := outPath + ".part" + + _ = os.Remove(tmpPath) + _ = os.Remove(outPath) + + ctx, cancel := context.WithTimeout(parentCtx, 2*time.Minute) + defer cancel() + + // Re-Encode ist robuster/framesauberer als -c copy + args := []string{ + "-y", + "-hide_banner", + "-loglevel", "error", + "-ss", formatFFSec(startSec), + "-i", srcPath, + "-t", formatFFSec(durSec), + + "-map", "0:v:0", + "-map", "0:a?", + "-c:v", "libx264", + "-preset", "veryfast", + "-crf", "20", + "-pix_fmt", "yuv420p", + "-c:a", "aac", + "-b:a", "128k", + "-movflags", "+faststart", + tmpPath, + } + + cmd := exec.CommandContext(ctx, ffmpegPath, args...) 
+ var stderr bytes.Buffer + cmd.Stderr = &stderr + + if err := cmd.Run(); err != nil { + _ = os.Remove(tmpPath) + msg := strings.TrimSpace(stderr.String()) + if msg != "" { + return fmt.Errorf("%w (%s)", err, msg) + } + return err + } + + fi, err := os.Stat(tmpPath) + if err != nil || fi == nil || fi.IsDir() || fi.Size() <= 0 { + _ = os.Remove(tmpPath) + return fmt.Errorf("ffmpeg hat keine gültige datei erzeugt") + } + + if err := os.Rename(tmpPath, outPath); err != nil { + _ = os.Remove(tmpPath) + return fmt.Errorf("rename fehlgeschlagen: %w", err) + } + + return nil +} + +func formatFFSec(v float64) string { + return strconv.FormatFloat(v, 'f', 3, 64) +} + +func absFloat(v float64) float64 { + if v < 0 { + return -v + } + return v +} diff --git a/backend/tasks_assets.go b/backend/tasks_assets.go index 976168f..d57665b 100644 --- a/backend/tasks_assets.go +++ b/backend/tasks_assets.go @@ -301,7 +301,13 @@ func runGenerateMissingAssets(ctx context.Context) { return } - // ✅ Progress + Counters + SSE Push + if _, aerr := prepareVideoForSplit(ctx, it.path, sourceURL, "nsfw"); aerr != nil { + updateAssetsState(func(st *AssetsTaskState) { + st.Error = "mindestens ein Eintrag konnte nicht vollständig analysiert werden (siehe Logs)" + }) + fmt.Println("⚠️ tasks generate assets analyze:", aerr) + } + updateAssetsState(func(st *AssetsTaskState) { if res.Skipped { st.Skipped++ diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index fae6541..3484b5c 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -19,6 +19,7 @@ import { useNotify } from './components/ui/notify' //import { startChaturbateOnlinePolling } from './lib/chaturbateOnlinePoller' import CategoriesTab from './components/ui/CategoriesTab' import LoginPage from './components/ui/LoginPage' +import VideoSplitModal from './components/ui/VideoSplitModal' const COOKIE_STORAGE_KEY = 'record_cookies' @@ -101,7 +102,7 @@ const DEFAULT_RECORDER_SETTINGS: RecorderSettingsState = { blurPreviews: false, 
teaserPlayback: 'hover', teaserAudio: false, - lowDiskPauseBelowGB: 3000, + lowDiskPauseBelowGB: 5, } type StoredModel = { @@ -295,6 +296,13 @@ function mfcUserFromUrl(normUrl: string): string { const baseName = (p: string) => (p || '').replaceAll('\\', '/').split('/').pop() || '' +function videoSrcFromJob(job: RecordJob | null): string { + if (!job) return '' + const file = baseName(job.output || '') + if (!file) return '' + return `/api/record/video?file=${encodeURIComponent(file)}` +} + function replaceBasename(fullPath: string, newBase: string) { const norm = (fullPath || '').replaceAll('\\', '/') const parts = norm.split('/') @@ -443,6 +451,9 @@ export default function App() { const [playerJob, setPlayerJob] = useState(null) const [playerExpanded, setPlayerExpanded] = useState(false) const [playerStartAtSec, setPlayerStartAtSec] = useState(null) + const [splitJob, setSplitJob] = useState(null) + const [splitModalOpen, setSplitModalOpen] = useState(false) + const [splitModalKey, setSplitModalKey] = useState(0) const [assetNonce, setAssetNonce] = useState(0) const bumpAssets = useCallback(() => setAssetNonce((n) => n + 1), []) @@ -516,6 +527,9 @@ export default function App() { setDoneCount(0) setDonePage(1) + setSplitJob(null) + setSplitModalOpen(false) + setModelsByKey({}) setModelsCount(0) @@ -1154,6 +1168,40 @@ export default function App() { return () => window.removeEventListener('open-model-details', onOpen as any) }, []) + useEffect(() => { + const onOpen = (ev: Event) => { + const e = ev as CustomEvent<{ jobId?: string; output?: string }> + const jobId = String(e.detail?.jobId ?? '').trim() + const output = String(e.detail?.output ?? '').trim() + + let hit: RecordJob | null = null + + if (jobId) { + hit = + jobs.find((j) => String((j as any)?.id ?? '').trim() === jobId) ?? + doneJobs.find((j) => String((j as any)?.id ?? '').trim() === jobId) ?? 
+ null + } + + if (!hit && output) { + const wanted = baseName(output) + hit = + jobs.find((j) => baseName(j.output || '') === wanted) ?? + doneJobs.find((j) => baseName(j.output || '') === wanted) ?? + null + } + + if (hit) { + setSplitJob(hit) + setSplitModalKey((k) => k + 1) + setSplitModalOpen(true) + } + } + + window.addEventListener('open-video-splitter', onOpen as EventListener) + return () => window.removeEventListener('open-video-splitter', onOpen as EventListener) + }, [jobs, doneJobs]) + const upsertModelCache = useCallback((m: StoredModel) => { const now = Date.now() const cur = modelsCacheRef.current @@ -1240,7 +1288,6 @@ export default function App() { }, [jobs]) // pending start falls gerade busy - const pendingStartUrlRef = useRef(null) const lastClipboardUrlRef = useRef('') // --- START QUEUE (parallel) --- @@ -1649,6 +1696,19 @@ export default function App() { setRoomStatusByModelKey(next) }, [jobs, modelsByKey, recSettings.useChaturbateApi]) + function shouldQueueForRoomStatus( + show: string + ): boolean { + const s = String(show || '').trim().toLowerCase() + return ( + s === 'private' || + s === 'hidden' || + s === 'away' || + s === 'offline' || + s === 'unknown' + ) + } + // ✅ StartURL (hier habe ich den alten Online-Fetch entfernt und nur Snapshot genutzt) const startUrl = useCallback(async (rawUrl: string, opts?: { silent?: boolean }): Promise => { const norm0 = normalizeHttpUrl(rawUrl) @@ -1694,10 +1754,20 @@ export default function App() { }) const mkLower = String(parsed?.modelKey ?? '').trim().toLowerCase() + if (mkLower) { - const upsertPendingRow = (showRaw?: unknown) => { + const upsertPendingRow = (opts?: { + show?: unknown + imageUrl?: string + chatRoomUrl?: string + }) => { const model = modelsByKeyRef.current[mkLower] as any - const show = normalizePendingShow(showRaw ?? model?.roomStatus) + const show = normalizePendingShow(opts?.show ?? model?.roomStatus) + + const imageUrl = + String(opts?.imageUrl ?? 
'').trim() || + String(model?.imageUrl ?? '').trim() || + undefined setPendingWatchedRooms((prev) => { const nextItem: PendingWatchedRoom = { @@ -1705,10 +1775,13 @@ export default function App() { modelKey: mkLower, url: norm, currentShow: show, - imageUrl: String(model?.imageUrl ?? '').trim() || undefined, + imageUrl, } - const idx = prev.findIndex((x) => String(x.modelKey ?? '').trim().toLowerCase() === mkLower) + const idx = prev.findIndex( + (x) => String(x.modelKey ?? '').trim().toLowerCase() === mkLower + ) + if (idx >= 0) { const copy = [...prev] copy[idx] = { ...copy[idx], ...nextItem } @@ -1719,31 +1792,95 @@ export default function App() { }) } - // 1) Wenn bereits busy: immer in Waiting + const enqueuePending = (opts?: { + show?: unknown + imageUrl?: string + chatRoomUrl?: string + }) => { + setPendingAutoStartByKey((prev) => { + const next = { ...(prev || {}), [mkLower]: norm } + pendingAutoStartByKeyRef.current = next + return next + }) + + upsertPendingRow(opts) + + applyPendingRoomSnapshot(mkLower, { + show: normalizePendingShow(opts?.show), + imageUrl: String(opts?.imageUrl ?? '').trim() || undefined, + chatRoomUrl: String(opts?.chatRoomUrl ?? 
'').trim() || undefined, + }) + } + + // Wenn gerade andere Starts laufen -> direkt in Warteschlange if (busyRef.current) { - setPendingAutoStartByKey((prev) => ({ ...(prev || {}), [mkLower]: norm })) - upsertPendingRow('public') + enqueuePending({ show: 'unknown' }) return true } - // 2) aktuellen room_status aus dem Store prüfen - const model = modelsByKeyRef.current[mkLower] as any - const show = normalizePendingShow(model?.roomStatus) + // Live current_show prüfen + const live = await fetchChaturbateCurrentShow(mkLower) + const liveShow = normalizePendingShow(live?.show) - if (show === 'private' || show === 'hidden' || show === 'away') { - setPendingAutoStartByKey((prev) => ({ ...(prev || {}), [mkLower]: norm })) - upsertPendingRow(show) + if (shouldQueueForRoomStatus(liveShow)) { + enqueuePending({ + show: liveShow, + imageUrl: live?.imageUrl, + chatRoomUrl: live?.chatRoomUrl, + }) return true } + + // public -> Snapshot aktualisieren und normal starten + applyPendingRoomSnapshot(mkLower, { + show: liveShow, + imageUrl: live?.imageUrl, + chatRoomUrl: live?.chatRoomUrl, + }) } } catch { - // parse fail -> normal starten - } - } else { - // Nicht-Chaturbate-API: wenn busy, wenigstens "pendingStart" setzen - if (busyRef.current) { - pendingStartUrlRef.current = norm - return true + // Wenn Live-Check fehlschlägt: lieber in Warteschlange statt blind starten + try { + const parsed = await apiJSON('/api/models/parse', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ input: norm }), + }) + + const mkLower = String(parsed?.modelKey ?? 
'').trim().toLowerCase() + if (mkLower) { + setPendingAutoStartByKey((prev) => { + const next = { ...(prev || {}), [mkLower]: norm } + pendingAutoStartByKeyRef.current = next + return next + }) + + setPendingWatchedRooms((prev) => { + const nextItem: PendingWatchedRoom = { + id: mkLower, + modelKey: mkLower, + url: norm, + currentShow: 'unknown', + } + + const idx = prev.findIndex( + (x) => String(x.modelKey ?? '').trim().toLowerCase() === mkLower + ) + + if (idx >= 0) { + const copy = [...prev] + copy[idx] = { ...copy[idx], ...nextItem } + return copy + } + + return [nextItem, ...prev] + }) + + return true + } + } catch { + // parse fail -> normal starten + } } } @@ -1855,6 +1992,12 @@ export default function App() { ) }, []) + const openSplitModal = useCallback((job: RecordJob) => { + setSplitJob(job) + setSplitModalKey((k) => k + 1) + setSplitModalOpen(true) + }, []) + // ✅ Anzahl Watched Models (aus Store), die online sind const onlineWatchedModelsCount = useMemo(() => { let c = 0 @@ -2003,12 +2146,6 @@ export default function App() { document.removeEventListener('visibilitychange', onVis) } }, [selectedTab, loadDoneCount, requestFinishedReload]) - - - useEffect(() => { - const maxPage = Math.max(1, Math.ceil(doneCount / DONE_PAGE_SIZE)) - if (donePage > maxPage) setDonePage(maxPage) - }, [doneCount, donePage]) useEffect(() => { if (!authed) return @@ -3103,6 +3240,7 @@ export default function App() { onToggleLike={handleToggleLike} onToggleWatch={handleToggleWatch} onKeepJob={handleKeepJob} + onSplitJob={openSplitModal} blurPreviews={Boolean(recSettings.blurPreviews)} teaserPlayback={recSettings.teaserPlayback ?? 
'hover'} teaserAudio={Boolean(recSettings.teaserAudio)} @@ -3159,6 +3297,26 @@ export default function App() { onStopJob={stopJob} /> + { + setSplitModalOpen(false) + setSplitJob(null) + }} + onApply={async ({ job, splits, segments }) => { + console.log('VIDEO_SPLIT_APPLY', { + jobId: job.id, + output: job.output, + splits, + segments, + }) + }} + /> + {playerJob ? ( { case 'assets': return 'Erstelle Vorschau/Thumbnails…' + case 'analyze': + return 'AI analysiert Segmente…' + case 'postwork': return 'Nacharbeiten laufen…' @@ -464,15 +467,15 @@ function DownloadsCardRow({ return (
{/* subtle gradient */} -
+
@@ -613,8 +616,8 @@ function DownloadsCardRow({ return (
+
@@ -752,7 +755,8 @@ function DownloadsCardRow({