File size: 5,067 Bytes
5e53604
 
 
 
 
 
 
 
 
730abe3
5e53604
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
730abe3
 
5e53604
 
 
 
 
aec21be
5e53604
 
 
 
 
 
 
 
 
aec21be
 
 
 
 
730abe3
aec21be
 
5e53604
 
 
 
 
b34dcc3
5e53604
 
 
b34dcc3
 
 
 
 
5e53604
 
 
b34dcc3
5e53604
 
 
b34dcc3
730abe3
 
 
b34dcc3
5e53604
730abe3
 
 
 
5e53604
 
 
 
730abe3
5e53604
 
 
 
 
 
 
 
 
 
 
 
730abe3
5e53604
 
 
 
 
 
 
 
 
 
aec21be
5e53604
 
 
 
 
 
 
 
 
 
 
730abe3
5e53604
 
 
 
 
 
 
 
 
 
01e7a0a
 
730abe3
01e7a0a
 
 
5e53604
 
 
 
 
 
 
 
aec21be
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
import httpx
import ffmpeg
import os
import sqlite3
import uvicorn
import uuid
from fastapi import FastAPI, BackgroundTasks
from pydantic import BaseModel
from starlette.responses import FileResponse, JSONResponse
from typing import Optional

# --- Configuration ---
app = FastAPI()
DOWNLOAD_DIR = "downloads"  # raw downloads are staged here (removed after remux)
ENCODED_DIR = "encoded"  # finished faststart MP4s, served by /download
DB_FILE = "jobs.db"  # SQLite database holding per-task job state
os.makedirs(DOWNLOAD_DIR, exist_ok=True)  # ensure working dirs exist at import time
os.makedirs(ENCODED_DIR, exist_ok=True)
# ---------------------

# --- Database Setup ---
def get_db():
    """Open a connection to the jobs database with dict-style row access."""
    connection = sqlite3.connect(DB_FILE)
    # sqlite3.Row lets callers read columns by name and pass rows to dict().
    connection.row_factory = sqlite3.Row
    return connection

def init_db():
    """Create the jobs table if it does not already exist.

    NOTE: sqlite3's connection context manager only wraps the
    transaction (commit on success, rollback on error) — it does NOT
    close the connection. The original leaked one file handle per
    call; we now close explicitly in a finally block.
    """
    conn = get_db()
    try:
        with conn:  # transaction scope: auto-commit / auto-rollback
            conn.execute('''
                CREATE TABLE IF NOT EXISTS jobs (
                    task_id TEXT PRIMARY KEY,
                    status TEXT NOT NULL,
                    file_name TEXT,
                    error_message TEXT,
                    duration INTEGER 
                )
            ''')
    finally:
        conn.close()

# --- Background Re-encoding Task ---
def process_video_task(url: str, cookie: Optional[str], task_id: str):
    """Download *url*, remux it to a faststart MP4, and record the
    outcome for *task_id* in the jobs table.

    Runs as a FastAPI background task: it never raises; any failure is
    captured in the job row (status='error') so /status can report it.

    Fixes vs. the original:
    - a partial output file is now deleted on failure, so /download can
      never serve a half-written MP4;
    - ffmpeg.Error.stderr may be None, which previously crashed the
      except handler with AttributeError and lost the real error.
    """
    db = get_db()
    temp_in_path = os.path.join(DOWNLOAD_DIR, f"{task_id}_in.mp4")
    temp_out_path = os.path.join(ENCODED_DIR, f"{task_id}_out.mp4")
    succeeded = False  # True only once the job row says 'complete'

    try:
        # 1. Update DB: Downloading
        db.execute("UPDATE jobs SET status = 'downloading' WHERE task_id = ?", (task_id,))
        db.commit()

        # Browser-like headers: the upstream host rejects bare clients.
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36",
            "Referer": "https://terabox.com/"
        }
        if cookie:
            headers["Cookie"] = cookie

        # Stream the download to disk in 8 KiB chunks to bound memory use.
        with httpx.stream("GET", url, headers=headers, follow_redirects=True, timeout=600.0) as r:
            r.raise_for_status()
            with open(temp_in_path, 'wb') as f:
                for chunk in r.iter_bytes(chunk_size=8192):
                    f.write(chunk)

        # 2. Update DB: "encoding" (really a fast remux — see below)
        db.execute("UPDATE jobs SET status = 'encoding' WHERE task_id = ?", (task_id,))
        db.commit()

        # Stream-copy remux (c='copy'): no transcoding, near-zero CPU.
        # movflags='+faststart' moves the moov atom to the front so
        # playback can start before the whole file has been fetched.
        (
            ffmpeg
            .input(temp_in_path)
            .output(temp_out_path, c='copy', movflags='+faststart')
            .run(capture_stdout=True, capture_stderr=True)
        )

        # 3. Get duration (whole seconds) via ffprobe.
        probe = ffmpeg.probe(temp_out_path)
        duration = int(float(probe['format']['duration']))

        # 4. Update DB: Complete
        final_file_name = f"{task_id}_out.mp4"
        db.execute(
            "UPDATE jobs SET status = 'complete', file_name = ?, duration = ? WHERE task_id = ?", 
            (final_file_name, duration, task_id)
        )
        db.commit()
        succeeded = True

    except Exception as e:
        # ffmpeg.Error carries the captured stderr as bytes; prefer it
        # when present, but it can be None — guard before decoding.
        error_msg = str(e)
        stderr = getattr(e, 'stderr', None)
        if stderr:
            error_msg = stderr.decode(errors='replace')
        print(f"Error processing task {task_id}: {error_msg}")
        db.execute("UPDATE jobs SET status = 'error', error_message = ? WHERE task_id = ?", (error_msg, task_id))
        db.commit()
    finally:
        # Always drop the raw download; also drop any partial output if
        # the job did not complete, so stale files never accumulate.
        if os.path.exists(temp_in_path):
            os.remove(temp_in_path)
        if not succeeded and os.path.exists(temp_out_path):
            os.remove(temp_out_path)
        db.close()

# --- API Endpoints ---
class VideoRequest(BaseModel):
    """Request body for POST /process."""
    url: str  # direct download URL of the source video
    cookie: Optional[str] = None  # optional Cookie header value for gated hosts

@app.post("/process")
async def start_processing(request: VideoRequest, background_tasks: BackgroundTasks):
    """START the encoding job: queue it in SQLite and kick off the
    background download/remux task. Returns the task_id for polling.

    sqlite3's `with` block only manages the transaction and never
    closes the connection — the original leaked one connection per
    request, so we close explicitly here.
    """
    task_id = str(uuid.uuid4())

    db = get_db()
    try:
        db.execute("INSERT INTO jobs (task_id, status) VALUES (?, 'queued')", (task_id,))
        db.commit()
    finally:
        db.close()

    # Task runs after the response is sent; the client polls /status.
    background_tasks.add_task(process_video_task, request.url, request.cookie, task_id)
    return {"status": "queued", "task_id": task_id}

@app.get("/status/{task_id}")
async def get_status(task_id: str):
    """CHECK the status of the encoding job.

    Closes the connection explicitly: `with get_db()` only wraps the
    transaction and leaked one connection per request in the original.
    """
    db = get_db()
    try:
        job = db.execute("SELECT * FROM jobs WHERE task_id = ?", (task_id,)).fetchone()
    finally:
        db.close()

    if not job:
        return JSONResponse(status_code=404, content={"status": "not_found"})

    # sqlite3.Row -> plain dict; includes 'duration' once complete.
    return dict(job)

@app.get("/download/{file_name}")
async def download_file(file_name: str):
    """DOWNLOAD the final remuxed file.

    *file_name* is an untrusted URL segment joined into a filesystem
    path; strip any directory components so an encoded traversal
    payload (e.g. '..%2F..') cannot escape ENCODED_DIR. Legitimate
    names produced by this service are plain basenames, so behavior
    for real clients is unchanged.
    """
    safe_name = os.path.basename(file_name)
    file_path = os.path.join(ENCODED_DIR, safe_name)
    if not os.path.exists(file_path):
        return JSONResponse(status_code=404, content={"status": "file_not_found"})

    return FileResponse(file_path, media_type='video/mp4', filename=safe_name)

@app.get("/")
async def health_check():
    """Liveness probe: always reports the service as up."""
    return dict(status="ok")


# --- Server Start ---
@app.on_event("startup")
async def startup_event():
    # NOTE(review): @app.on_event is deprecated in newer FastAPI in
    # favor of lifespan handlers — confirm the target FastAPI version.
    init_db() # Create the database and table on startup
    print("Database initialized.")

if __name__ == "__main__":
    # Dev entry point: serve on all interfaces, port 7860.
    uvicorn.run(app, host="0.0.0.0", port=7860)