Zone A
+AI Media Asset Ingest Hub
+Multimodal Discovery, Drag Upload, Direct Clip Ingest
+Zone B
+Smart Ingest Dropzone
+Zone C
+diff --git a/.gitea/workflows/build-push.yaml b/.gitea/workflows/build-push.yaml new file mode 100644 index 0000000..c42e503 --- /dev/null +++ b/.gitea/workflows/build-push.yaml @@ -0,0 +1,37 @@ +name: build-push + +on: + push: + branches: + - main + workflow_dispatch: + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to registry + uses: docker/login-action@v3 + with: + registry: git.savethenurse.com + username: ${{ secrets.REGISTRY_USERNAME }} + password: ${{ secrets.REGISTRY_PASSWORD }} + + - name: Build and push + uses: docker/build-push-action@v6 + with: + context: . + push: true + platforms: linux/amd64 + tags: | + git.savethenurse.com/savethenurse/ai-media-hub:latest + git.savethenurse.com/savethenurse/ai-media-hub:${{ github.sha }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..325181c --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +db/*.db +db/.DS_Store +downloads/* +!downloads/.gitkeep +worker/__pycache__/ +*.pyc +node_modules/ +dist/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..5ce80eb --- /dev/null +++ b/Dockerfile @@ -0,0 +1,28 @@ +FROM golang:1.24-bookworm AS go-builder +WORKDIR /src +COPY go.mod ./ +RUN go mod download +COPY backend ./backend +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o /out/ai-media-hub ./backend + +FROM python:3.12-slim-bookworm +ENV APP_ROOT=/app +WORKDIR /app + +RUN apt-get update \ + && apt-get install -y --no-install-recommends ffmpeg ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +COPY worker/requirements.txt /app/worker/requirements.txt +RUN pip install --no-cache-dir -r /app/worker/requirements.txt + +COPY --from=go-builder /out/ai-media-hub /app/ai-media-hub +COPY backend /app/backend +COPY worker /app/worker +COPY frontend /app/frontend +COPY db /app/db +COPY 
downloads /app/downloads + +EXPOSE 8080 + +CMD ["/app/ai-media-hub"] diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000..a999f42 --- /dev/null +++ b/TODO.md @@ -0,0 +1,13 @@ +- [x] 1. 프로젝트 폴더 구조 생성 +- [x] 2. Dockerfile 및 Unraid XML 템플릿 작성 +- [x] 3. Gitea Actions CI/CD 파일 작성 (.gitea/workflows/build-push.yaml) +- [x] 4. SQLite DB 모델 및 초기화 로직 (backend/models) +- [x] 5. Python yt-dlp 워커 스크립트 작성 (worker/downloader.py) +- [x] 6. Go 백엔드 라우팅 및 파일 업로드(Zone B) 구현 +- [x] 7. Go - Python 연동 로직 (Zone C 다운로드 실행) +- [x] 8. Google CSE 및 Gemini 2.5 Flash 연동 로직 (Zone A) +- [x] 9. WebSocket 서버 및 진행률 방송 로직 +- [x] 10. 프론트엔드 메인 UI 구성 (Tailwind 3-Zone Layout) +- [x] 11. 프론트엔드 JS 통신 로직 및 상태 바 렌더링 연동 +- [x] 12. 전체 기능 통합 테스트 +- [ ] 13. Git Init 및 자동 Push (Gitea) diff --git a/backend/handlers/api.go b/backend/handlers/api.go new file mode 100644 index 0000000..7c56cca --- /dev/null +++ b/backend/handlers/api.go @@ -0,0 +1,282 @@ +package handlers + +import ( + "bufio" + "database/sql" + "encoding/json" + "errors" + "fmt" + "net/http" + "os" + "os/exec" + "path/filepath" + "regexp" + "strings" + "sync" + "time" + + "ai-media-hub/backend/models" + "ai-media-hub/backend/services" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/gorilla/websocket" +) + +type App struct { + DB *sql.DB + DownloadsDir string + WorkerScript string + SearchService *services.SearchService + GeminiService *services.GeminiService + Hub *Hub +} + +type Hub struct { + clients map[*websocket.Conn]bool + mu sync.Mutex +} + +func NewHub() *Hub { + return &Hub{clients: map[*websocket.Conn]bool{}} +} + +func (h *Hub) Broadcast(event string, data any) { + h.mu.Lock() + defer h.mu.Unlock() + + payload, _ := json.Marshal(gin.H{"event": event, "data": data}) + for conn := range h.clients { + _ = conn.WriteMessage(websocket.TextMessage, payload) + } +} + +func (h *Hub) Add(conn *websocket.Conn) { + h.mu.Lock() + defer h.mu.Unlock() + h.clients[conn] = true +} + +func (h *Hub) Remove(conn 
*websocket.Conn) {
	h.mu.Lock()
	defer h.mu.Unlock()
	delete(h.clients, conn)
	_ = conn.Close()
}

// RegisterRoutes wires every HTTP and WebSocket endpoint onto the router.
func RegisterRoutes(router *gin.Engine, app *App) {
	router.GET("/healthz", func(c *gin.Context) {
		c.JSON(http.StatusOK, gin.H{"status": "ok"})
	})
	router.GET("/ws", app.handleWS)
	router.GET("/api/history/check", app.checkDuplicate)
	router.POST("/api/upload", app.uploadFile)
	router.POST("/api/download", app.startDownload)
	router.POST("/api/search", app.searchMedia)
}

// handleWS upgrades the request to a WebSocket, registers it with the hub,
// and blocks draining inbound frames until the peer disconnects.
func (a *App) handleWS(c *gin.Context) {
	up := websocket.Upgrader{
		// Accept every origin — assumes the app sits behind a trusted
		// reverse proxy; NOTE(review): confirm this is intentional.
		CheckOrigin: func(*http.Request) bool { return true },
	}
	conn, err := up.Upgrade(c.Writer, c.Request, nil)
	if err != nil {
		return
	}
	a.Hub.Add(conn)
	defer a.Hub.Remove(conn)

	// The first read error (close frame, network drop) ends the session.
	for {
		if _, _, readErr := conn.ReadMessage(); readErr != nil {
			return
		}
	}
}

// checkDuplicate reports whether the given url already has a history row.
func (a *App) checkDuplicate(c *gin.Context) {
	target := strings.TrimSpace(c.Query("url"))
	if target == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "url is required"})
		return
	}
	record, err := models.FindByURL(a.DB, target)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{"exists": record != nil, "record": record})
}

// uploadFile stores a multipart upload under DownloadsDir with a sanitized
// filename, broadcasting start/complete progress events over the hub.
func (a *App) uploadFile(c *gin.Context) {
	upload, err := c.FormFile("file")
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "file is required"})
		return
	}

	a.Hub.Broadcast("progress", gin.H{"type": "upload", "status": "started", "progress": 5, "filename": upload.Filename})

	safeName := normalizeFilename(upload.Filename)
	targetPath := filepath.Join(a.DownloadsDir, safeName)
	if err := c.SaveUploadedFile(upload, targetPath); err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	a.Hub.Broadcast("progress", gin.H{"type": "upload", "status": "completed", "progress": 100, "filename": safeName})
	c.JSON(http.StatusOK, gin.H{"message": "uploaded", "path": 
targetPath, "filename": safeName}) +} + +func (a *App) startDownload(c *gin.Context) { + var req struct { + URL string `json:"url"` + Start string `json:"start"` + End string `json:"end"` + Force bool `json:"force"` + } + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + rec, err := models.FindByURL(a.DB, req.URL) + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + if rec != nil && !req.Force { + c.JSON(http.StatusConflict, gin.H{"error": "duplicate url", "record": rec}) + return + } + + outputBase := uuid.NewString() + outputPath := filepath.Join(a.DownloadsDir, outputBase+".mp4") + recordID, err := models.InsertDownload(a.DB, req.URL, detectSource(req.URL), outputPath, "queued") + if err != nil { + c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()}) + return + } + + go a.runDownload(recordID, req.URL, req.Start, req.End, outputPath) + c.JSON(http.StatusAccepted, gin.H{"message": "download started", "recordId": recordID}) +} + +func (a *App) runDownload(recordID int64, url, start, end, outputPath string) { + a.Hub.Broadcast("progress", gin.H{"type": "download", "status": "queued", "progress": 0, "url": url}) + cmd := exec.Command("python3", a.WorkerScript, "--url", url, "--start", start, "--end", end, "--output", outputPath) + stdout, err := cmd.StdoutPipe() + if err != nil { + a.Hub.Broadcast("progress", gin.H{"type": "download", "status": "error", "progress": 0, "message": err.Error()}) + _ = models.MarkDownloadCompleted(a.DB, recordID, "failed") + return + } + cmd.Stderr = cmd.Stdout + + if err := cmd.Start(); err != nil { + a.Hub.Broadcast("progress", gin.H{"type": "download", "status": "error", "progress": 0, "message": err.Error()}) + _ = models.MarkDownloadCompleted(a.DB, recordID, "failed") + return + } + + scanner := bufio.NewScanner(stdout) + for scanner.Scan() { + line := scanner.Bytes() + var msg map[string]any + 
if err := json.Unmarshal(line, &msg); err == nil { + msg["type"] = "download" + a.Hub.Broadcast("progress", msg) + } + } + + status := "completed" + if err := cmd.Wait(); err != nil { + status = "failed" + a.Hub.Broadcast("progress", gin.H{"type": "download", "status": "error", "progress": 100, "message": err.Error()}) + } + _ = models.MarkDownloadCompleted(a.DB, recordID, status) +} + +func (a *App) searchMedia(c *gin.Context) { + var req struct { + Query string `json:"query"` + } + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + if strings.TrimSpace(req.Query) == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "query is required"}) + return + } + + results, err := a.SearchService.SearchMedia(req.Query) + if err != nil { + c.JSON(http.StatusBadGateway, gin.H{"error": err.Error()}) + return + } + + recommended, err := a.GeminiService.Recommend(req.Query, results) + if err != nil { + fallback := make([]services.AIRecommendation, 0, min(4, len(results))) + for _, result := range results[:min(4, len(results))] { + fallback = append(fallback, services.AIRecommendation{ + Title: result.Title, + Link: result.Link, + ThumbnailURL: result.ThumbnailURL, + Source: result.Source, + Reason: "Gemini recommendation failed, showing raw search result.", + Recommended: true, + }) + } + c.JSON(http.StatusOK, gin.H{"results": fallback, "warning": err.Error()}) + return + } + + c.JSON(http.StatusOK, gin.H{"results": recommended}) +} + +func normalizeFilename(name string) string { + base := strings.ToLower(strings.TrimSpace(name)) + ext := filepath.Ext(base) + base = strings.TrimSuffix(base, ext) + re := regexp.MustCompile(`[^a-z0-9]+`) + base = strings.Trim(re.ReplaceAllString(base, "-"), "-") + if base == "" { + base = fmt.Sprintf("upload-%d", time.Now().Unix()) + } + if ext == "" { + ext = ".bin" + } + return base + ext +} + +func detectSource(url string) string { + switch { + case strings.Contains(url, 
"youtube"): + return "YouTube" + case strings.Contains(url, "tiktok"): + return "TikTok" + default: + return "direct" + } +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func EnsurePaths(downloadsDir, workerScript string) error { + if err := os.MkdirAll(downloadsDir, 0o755); err != nil { + return err + } + if _, err := os.Stat(workerScript); err != nil { + if errors.Is(err, os.ErrNotExist) { + return fmt.Errorf("worker script not found: %s", workerScript) + } + return err + } + return nil +} diff --git a/backend/main.go b/backend/main.go new file mode 100644 index 0000000..08da603 --- /dev/null +++ b/backend/main.go @@ -0,0 +1,66 @@ +package main + +import ( + "log" + "net/http" + "os" + "path/filepath" + + "ai-media-hub/backend/handlers" + "ai-media-hub/backend/models" + "ai-media-hub/backend/services" + + "github.com/gin-gonic/gin" +) + +func main() { + root := envOrDefault("APP_ROOT", "/app") + dbPath := envOrDefault("SQLITE_PATH", filepath.Join(root, "db", "media.db")) + downloadsDir := envOrDefault("DOWNLOADS_DIR", filepath.Join(root, "downloads")) + frontendDir := envOrDefault("FRONTEND_DIR", filepath.Join(root, "frontend")) + workerScript := envOrDefault("WORKER_SCRIPT", filepath.Join(root, "worker", "downloader.py")) + + db, err := models.InitDB(dbPath) + if err != nil { + log.Fatal(err) + } + defer db.Close() + + if err := handlers.EnsurePaths(downloadsDir, workerScript); err != nil { + log.Fatal(err) + } + + app := &handlers.App{ + DB: db, + DownloadsDir: downloadsDir, + WorkerScript: workerScript, + SearchService: services.NewSearchService(os.Getenv("GOOGLE_CSE_API_KEY"), os.Getenv("GOOGLE_CSE_CX")), + GeminiService: services.NewGeminiService(os.Getenv("GEMINI_API_KEY")), + Hub: handlers.NewHub(), + } + + router := gin.Default() + handlers.RegisterRoutes(router, app) + router.StaticFile("/", filepath.Join(frontendDir, "index.html")) + router.StaticFile("/app.js", filepath.Join(frontendDir, "app.js")) + 
router.StaticFile("/style.css", filepath.Join(frontendDir, "style.css")) + router.NoRoute(func(c *gin.Context) { + c.File(filepath.Join(frontendDir, "index.html")) + }) + router.NoMethod(func(c *gin.Context) { + c.JSON(http.StatusMethodNotAllowed, gin.H{"error": "method not allowed"}) + }) + + addr := envOrDefault("APP_ADDR", ":8080") + log.Printf("server listening on %s", addr) + if err := router.Run(addr); err != nil { + log.Fatal(err) + } +} + +func envOrDefault(key, fallback string) string { + if value := os.Getenv(key); value != "" { + return value + } + return fallback +} diff --git a/backend/models/db.go b/backend/models/db.go new file mode 100644 index 0000000..860299f --- /dev/null +++ b/backend/models/db.go @@ -0,0 +1,92 @@ +package models + +import ( + "database/sql" + "errors" + "os" + "path/filepath" + "time" + + _ "modernc.org/sqlite" +) + +type DownloadRecord struct { + ID int64 `json:"id"` + URL string `json:"url"` + Source string `json:"source"` + OutputPath string `json:"outputPath"` + Status string `json:"status"` + StartedAt time.Time `json:"startedAt"` + CompletedAt time.Time `json:"completedAt,omitempty"` +} + +func InitDB(path string) (*sql.DB, error) { + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + return nil, err + } + db, err := sql.Open("sqlite", path) + if err != nil { + return nil, err + } + + schema := ` + CREATE TABLE IF NOT EXISTS download_history ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + url TEXT NOT NULL, + source TEXT NOT NULL, + output_path TEXT NOT NULL, + status TEXT NOT NULL, + started_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + completed_at DATETIME + ); + CREATE INDEX IF NOT EXISTS idx_download_history_url ON download_history(url); + ` + + if _, err := db.Exec(schema); err != nil { + return nil, err + } + + return db, nil +} + +func InsertDownload(db *sql.DB, url, source, outputPath, status string) (int64, error) { + res, err := db.Exec( + `INSERT INTO download_history (url, source, output_path, 
status) VALUES (?, ?, ?, ?)`, + url, source, outputPath, status, + ) + if err != nil { + return 0, err + } + return res.LastInsertId() +} + +func MarkDownloadCompleted(db *sql.DB, id int64, status string) error { + _, err := db.Exec( + `UPDATE download_history SET status = ?, completed_at = CURRENT_TIMESTAMP WHERE id = ?`, + status, id, + ) + return err +} + +func FindByURL(db *sql.DB, url string) (*DownloadRecord, error) { + row := db.QueryRow( + `SELECT id, url, source, output_path, status, started_at, COALESCE(completed_at, '') FROM download_history WHERE url = ? ORDER BY id DESC LIMIT 1`, + url, + ) + + var rec DownloadRecord + var completedRaw string + if err := row.Scan(&rec.ID, &rec.URL, &rec.Source, &rec.OutputPath, &rec.Status, &rec.StartedAt, &completedRaw); err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, nil + } + return nil, err + } + if completedRaw != "" { + parsed, err := time.Parse("2006-01-02 15:04:05", completedRaw) + if err == nil { + rec.CompletedAt = parsed + } + } + return &rec, nil +} diff --git a/backend/services/cse.go b/backend/services/cse.go new file mode 100644 index 0000000..1d38405 --- /dev/null +++ b/backend/services/cse.go @@ -0,0 +1,116 @@ +package services + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + "strings" + "time" +) + +type SearchResult struct { + Title string `json:"title"` + Link string `json:"link"` + DisplayLink string `json:"displayLink"` + Snippet string `json:"snippet"` + ThumbnailURL string `json:"thumbnailUrl"` + Source string `json:"source"` +} + +type SearchService struct { + APIKey string + CX string + Client *http.Client +} + +func NewSearchService(apiKey, cx string) *SearchService { + return &SearchService{ + APIKey: apiKey, + CX: cx, + Client: &http.Client{Timeout: 20 * time.Second}, + } +} + +func (s *SearchService) SearchMedia(query string) ([]SearchResult, error) { + if s.APIKey == "" || s.CX == "" { + return nil, fmt.Errorf("google cse credentials are not configured") + 
} + + domains := []string{"youtube.com", "tiktok.com", "envato.com", "artgrid.io"} + siteQuery := strings.Join(domains, " OR site:") + fullQuery := fmt.Sprintf("%s (site:%s)", query, siteQuery) + + values := url.Values{} + values.Set("key", s.APIKey) + values.Set("cx", s.CX) + values.Set("q", fullQuery) + values.Set("searchType", "image") + values.Set("num", "10") + values.Set("safe", "off") + + results := make([]SearchResult, 0, 30) + seen := map[string]bool{} + for _, start := range []string{"1", "11", "21"} { + values.Set("start", start) + endpoint := "https://www.googleapis.com/customsearch/v1?" + values.Encode() + resp, err := s.Client.Get(endpoint) + if err != nil { + return nil, err + } + + if resp.StatusCode >= 300 { + resp.Body.Close() + return nil, fmt.Errorf("google cse returned status %d", resp.StatusCode) + } + + var payload struct { + Items []struct { + Title string `json:"title"` + Link string `json:"link"` + DisplayLink string `json:"displayLink"` + Snippet string `json:"snippet"` + Image struct { + ThumbnailLink string `json:"thumbnailLink"` + } `json:"image"` + } `json:"items"` + } + + if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil { + resp.Body.Close() + return nil, err + } + resp.Body.Close() + + for _, item := range payload.Items { + if item.Link == "" || seen[item.Link] { + continue + } + seen[item.Link] = true + results = append(results, SearchResult{ + Title: item.Title, + Link: item.Link, + DisplayLink: item.DisplayLink, + Snippet: item.Snippet, + ThumbnailURL: item.Image.ThumbnailLink, + Source: inferSource(item.DisplayLink), + }) + } + } + return results, nil +} + +func inferSource(displayLink string) string { + switch { + case strings.Contains(displayLink, "youtube"): + return "YouTube" + case strings.Contains(displayLink, "tiktok"): + return "TikTok" + case strings.Contains(displayLink, "envato"): + return "Envato" + case strings.Contains(displayLink, "artgrid"): + return "Artgrid" + default: + return displayLink + } 
+} diff --git a/backend/services/gemini.go b/backend/services/gemini.go new file mode 100644 index 0000000..22117e2 --- /dev/null +++ b/backend/services/gemini.go @@ -0,0 +1,175 @@ +package services + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "mime" + "net/http" + "strings" + "time" +) + +type GeminiService struct { + APIKey string + Client *http.Client +} + +type AIRecommendation struct { + Title string `json:"title"` + Link string `json:"link"` + ThumbnailURL string `json:"thumbnailUrl"` + Source string `json:"source"` + Reason string `json:"reason"` + Recommended bool `json:"recommended"` +} + +func NewGeminiService(apiKey string) *GeminiService { + return &GeminiService{ + APIKey: apiKey, + Client: &http.Client{Timeout: 40 * time.Second}, + } +} + +func (g *GeminiService) Recommend(query string, candidates []SearchResult) ([]AIRecommendation, error) { + if g.APIKey == "" { + return nil, fmt.Errorf("gemini api key is not configured") + } + if len(candidates) == 0 { + return []AIRecommendation{}, nil + } + + type geminiPart map[string]any + parts := []geminiPart{ + { + "text": `Analyze the provided images for the user's search intent. Return JSON only in this shape: +{"recommendations":[{"index":0,"reason":"short reason","recommended":true}]} +Mark only the best matches as recommended=true. Keep reasons concise. 
User query: ` + query,
		},
	}

	// Attach at most eight thumbnails; candidates whose image cannot be
	// fetched are skipped rather than failing the whole request.
	limit := min(len(candidates), 8)
	for i := 0; i < limit; i++ {
		img, mimeType, err := fetchImageAsInlineData(g.Client, candidates[i].ThumbnailURL)
		if err != nil {
			continue
		}
		parts = append(parts,
			geminiPart{"text": fmt.Sprintf("Candidate %d: title=%s source=%s link=%s", i, candidates[i].Title, candidates[i].Source, candidates[i].Link)},
			geminiPart{"inlineData": map[string]string{"mimeType": mimeType, "data": img}},
		)
	}

	body := map[string]any{
		"contents": []map[string]any{
			{"parts": parts},
		},
		"generationConfig": map[string]any{
			"responseMimeType": "application/json",
		},
	}

	// Marshal of a map built purely from strings and slices cannot fail.
	rawBody, _ := json.Marshal(body)
	endpoint := "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent?key=" + g.APIKey
	resp, err := g.Client.Post(endpoint, "application/json", bytes.NewReader(rawBody))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode >= 300 {
		// Include a bounded slice of the response body for diagnostics.
		data, _ := io.ReadAll(io.LimitReader(resp.Body, 2048))
		return nil, fmt.Errorf("gemini returned status %d: %s", resp.StatusCode, string(data))
	}

	var payload struct {
		Candidates []struct {
			Content struct {
				Parts []struct {
					Text string `json:"text"`
				} `json:"parts"`
			} `json:"content"`
		} `json:"candidates"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
		return nil, err
	}
	if len(payload.Candidates) == 0 || len(payload.Candidates[0].Content.Parts) == 0 {
		return nil, fmt.Errorf("gemini returned no candidates")
	}

	// The model was instructed to answer with JSON only; parse the first
	// candidate's text as the recommendations document.
	var parsed struct {
		Recommendations []struct {
			Index       int    `json:"index"`
			Reason      string `json:"reason"`
			Recommended bool   `json:"recommended"`
		} `json:"recommendations"`
	}
	if err := json.Unmarshal([]byte(payload.Candidates[0].Content.Parts[0].Text), &parsed); err != nil {
		return nil, err
	}

	recommendations := make([]AIRecommendation, 0, len(parsed.Recommendations))
	for _, rec := range 
parsed.Recommendations { + if rec.Index < 0 || rec.Index >= len(candidates) || !rec.Recommended { + continue + } + src := candidates[rec.Index] + recommendations = append(recommendations, AIRecommendation{ + Title: src.Title, + Link: src.Link, + ThumbnailURL: src.ThumbnailURL, + Source: src.Source, + Reason: rec.Reason, + Recommended: true, + }) + } + + if len(recommendations) == 0 { + for _, candidate := range candidates[:min(4, len(candidates))] { + recommendations = append(recommendations, AIRecommendation{ + Title: candidate.Title, + Link: candidate.Link, + ThumbnailURL: candidate.ThumbnailURL, + Source: candidate.Source, + Reason: "Fallback result because Gemini returned no recommended items.", + Recommended: true, + }) + } + } + + return recommendations, nil +} + +func fetchImageAsInlineData(client *http.Client, imageURL string) (string, string, error) { + resp, err := client.Get(imageURL) + if err != nil { + return "", "", err + } + defer resp.Body.Close() + + if resp.StatusCode >= 300 { + return "", "", fmt.Errorf("thumbnail fetch failed with %d", resp.StatusCode) + } + + contentType := resp.Header.Get("Content-Type") + mimeType, _, _ := mime.ParseMediaType(contentType) + if mimeType == "" || !strings.HasPrefix(mimeType, "image/") { + mimeType = "image/jpeg" + } + + data, err := io.ReadAll(io.LimitReader(resp.Body, 2*1024*1024)) + if err != nil { + return "", "", err + } + return base64.StdEncoding.EncodeToString(data), mimeType, nil +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/db/.gitkeep b/db/.gitkeep new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/db/.gitkeep @@ -0,0 +1 @@ + diff --git a/downloads/.gitkeep b/downloads/.gitkeep new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/downloads/.gitkeep @@ -0,0 +1 @@ + diff --git a/frontend/app.js b/frontend/app.js new file mode 100644 index 0000000..acf74a5 --- /dev/null +++ b/frontend/app.js @@ -0,0 +1,158 @@ +const statusBar = 
document.getElementById("statusBar"); +const statusLabel = document.getElementById("statusLabel"); +const searchForm = document.getElementById("searchForm"); +const searchQuery = document.getElementById("searchQuery"); +const searchResults = document.getElementById("searchResults"); +const searchWarning = document.getElementById("searchWarning"); +const dropzone = document.getElementById("dropzone"); +const fileInput = document.getElementById("fileInput"); +const uploadResult = document.getElementById("uploadResult"); +const downloadForm = document.getElementById("downloadForm"); +const downloadUrl = document.getElementById("downloadUrl"); +const startTime = document.getElementById("startTime"); +const endTime = document.getElementById("endTime"); +const downloadResult = document.getElementById("downloadResult"); +const cardTemplate = document.getElementById("searchCardTemplate"); + +function setStatus(label, progress) { + statusLabel.textContent = label; + statusBar.style.width = `${Math.max(0, Math.min(100, progress))}%`; +} + +function connectWS() { + const protocol = window.location.protocol === "https:" ? "wss" : "ws"; + const socket = new WebSocket(`${protocol}://${window.location.host}/ws`); + socket.addEventListener("message", (event) => { + const payload = JSON.parse(event.data); + if (payload.event !== "progress") { + return; + } + const data = payload.data; + setStatus(`${data.type || "task"}: ${data.status}`, Number(data.progress ?? 
0)); + if (data.type === "upload" && data.status === "completed") { + uploadResult.textContent = `${data.filename} saved successfully`; + } + if (data.type === "download" && data.status === "completed") { + downloadResult.textContent = data.output || "download completed"; + } + if (data.status === "error") { + downloadResult.textContent = data.message || "task failed"; + } + }); + socket.addEventListener("close", () => { + setTimeout(connectWS, 1000); + }); +} + +async function api(path, options = {}) { + const response = await fetch(path, options); + const data = await response.json().catch(() => ({})); + if (!response.ok) { + const error = new Error(data.error || "request failed"); + error.status = response.status; + error.data = data; + throw error; + } + return data; +} + +function renderResults(results) { + searchResults.innerHTML = ""; + for (const item of results) { + const node = cardTemplate.content.firstElementChild.cloneNode(true); + node.href = item.link; + node.querySelector("img").src = item.thumbnailUrl; + node.querySelector("img").alt = item.title; + node.querySelector("h3").textContent = item.title; + node.querySelector("p").textContent = item.reason; + node.querySelector(".source-badge").textContent = item.source; + searchResults.appendChild(node); + } +} + +searchForm.addEventListener("submit", async (event) => { + event.preventDefault(); + setStatus("searching", 20); + searchWarning.classList.add("hidden"); + try { + const data = await api("/api/search", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ query: searchQuery.value }), + }); + renderResults(data.results || []); + if (data.warning) { + searchWarning.textContent = data.warning; + searchWarning.classList.remove("hidden"); + } + setStatus("search complete", 100); + } catch (error) { + searchWarning.textContent = error.message; + searchWarning.classList.remove("hidden"); + setStatus("search failed", 100); + } +}); + +async function 
uploadFile(file) { + const formData = new FormData(); + formData.append("file", file); + uploadResult.textContent = "uploading..."; + await api("/api/upload", { method: "POST", body: formData }); +} + +dropzone.addEventListener("dragover", (event) => { + event.preventDefault(); + dropzone.classList.add("border-white/60", "bg-white/[0.08]"); +}); + +dropzone.addEventListener("dragleave", () => { + dropzone.classList.remove("border-white/60", "bg-white/[0.08]"); +}); + +dropzone.addEventListener("drop", async (event) => { + event.preventDefault(); + dropzone.classList.remove("border-white/60", "bg-white/[0.08]"); + const file = event.dataTransfer.files[0]; + if (file) { + await uploadFile(file); + } +}); + +fileInput.addEventListener("change", async () => { + const [file] = fileInput.files; + if (file) { + await uploadFile(file); + } +}); + +downloadForm.addEventListener("submit", async (event) => { + event.preventDefault(); + downloadResult.textContent = "checking duplicate history..."; + try { + const dup = await api(`/api/history/check?url=${encodeURIComponent(downloadUrl.value)}`); + let force = false; + if (dup.exists) { + force = window.confirm("동일 URL 다운로드 이력이 있습니다. 계속 진행할까요?"); + if (!force) { + downloadResult.textContent = "cancelled"; + return; + } + } + const data = await api("/api/download", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ + url: downloadUrl.value, + start: startTime.value, + end: endTime.value, + force, + }), + }); + downloadResult.textContent = data.message; + } catch (error) { + downloadResult.textContent = error.message; + } +}); + +connectWS(); +setStatus("idle", 0); diff --git a/frontend/index.html b/frontend/index.html new file mode 100644 index 0000000..3bfde0b --- /dev/null +++ b/frontend/index.html @@ -0,0 +1,91 @@ + + +
+ + +AI Media Asset Ingest Hub
+Zone A
+Zone B
+Zone C
+