🐋
CodeFlow
PRO • Anti-Stop Engine
IA Entreprise
developer
AI: Rosalinda Ready
Rosalinda • Agent
Main Workspace
Write a request below. I will plan, generate, verify and deliver without interruption.
Mode: Assistant + Generation • Anti-Stop: Watchdog + Resume
Preview
File: index.html
Anti-Stop: if it freezes, auto-resume + HTML repair + retry.
Backend: Offline
Job:
const input = document.getElementById("prompt"); const btnSend = document.getElementById("btnSend"); const chat = document.getElementById("chat"); function addMessage(author, text) { const div = document.createElement("div"); div.className = "rounded-xl bg-white/5 border border-white/10 p-3 text-sm"; div.innerHTML = `${author}: ${text}`; chat.appendChild(div); chat.scrollTop = chat.scrollHeight; } function handleSend() { const text = input.value.trim(); if (!text) return; console.log("MESSAGE CAPTURED:", text); addMessage("You", text); input.value = ""; // Défense : backend absent addMessage("System", "⚠️ Backend offline — aucune IA n'est connectée."); } btnSend.addEventListener("click", handleSend); input.addEventListener("keydown", (e) => { if (e.key === "Enter") { e.preventDefault(); handleSend(); } }); ollama pull llama3.1 { "name": "rosalinda-server", "type": "module", "dependencies": { "cors": "^2.8.5", "express": "^4.19.2", "helmet": "^7.1.0", "express-rate-limit": "^7.4.0", "node-fetch": "^3.3.2" } } cd C:\rosalinda-server npm install setx ROSALINDA_INTERNAL_TOKEN "mon_super_secret" # ferme / rouvre PowerShell node server.js zone-ai/ README.md .env.example docker-compose.yml Makefile apps/ api/ Dockerfile requirements.txt alembic.ini alembic/ env.py script.py.mako versions/ 0001_init.py app/ __init__.py main.py core/ __init__.py config.py security.py logging.py rate_limit.py errors.py db/ __init__.py session.py models.py crud.py api/ __init__.py deps.py routes_auth.py routes_chat.py routes_jobs.py routes_assets.py routes_health.py services/ __init__.py llm.py images.py videos.py storage.py safety.py schemas/ __init__.py auth.py chat.py jobs.py assets.py workers/ runner/ Dockerfile requirements.txt worker.py tasks.py pipelines/ __init__.py image_sdxl.py video_svd.py util.py web/ Dockerfile package.json next.config.js tailwind.config.js postcss.config.js tsconfig.json src/ app/ layout.tsx page.tsx api/ auth/ login/route.ts me/route.ts chat/route.ts jobs/route.ts assets/route.ts components/ ChatPanel.tsx StudioPanel.tsx JobsPanel.tsx TopBar.tsx Sidebar.tsx CodePanel.tsx lib/ api.ts auth.ts types.ts styles/ globals.css md Copier le code # README.md # Zone AI (API + Workers + Web) — Image + Video generation + Chat agent ## Quickstart cp .env.example .env docker compose up --build ## URLs API: http://localhost:8080/docs WEB: http://localhost:3000 MinIO: http://localhost:9001 env Copier le code # .env.example APP_NAME=ZoneAI ENV=dev JWT_SECRET=change_me_super_secret JWT_EXPIRES_MIN=1440 POSTGRES_HOST=postgres POSTGRES_PORT=5432 POSTGRES_DB=zoneai POSTGRES_USER=zoneai POSTGRES_PASSWORD=zoneai_pw REDIS_URL=redis://redis:6379/0 S3_ENDPOINT=http://minio:9000 S3_ACCESS_KEY=zoneai S3_SECRET_KEY=zoneai_secret S3_BUCKET=zoneai S3_REGION=us-east-1 S3_PUBLIC_BASE=http://localhost:9000/zoneai # Optional external LLM (you can swap provider) LLM_PROVIDER=stub OPENAI_API_KEY= OPENAI_BASE_URL=https://api.openai.com/v1 OPENAI_MODEL=gpt-4o-mini # Image/Video local model settings HF_HOME=/models/hf TORCH_HOME=/models/torch IMAGE_MODEL_ID=stabilityai/stable-diffusion-xl-base-1.0 VIDEO_MODEL_ID=stabilityai/stable-video-diffusion-img2vid-xt # Rate limits RATE_LIMIT_PER_MIN=120 MAX_UPLOAD_MB=50 yaml Copier le code # docker-compose.yml services: postgres: image: postgres:16 environment: POSTGRES_DB: ${POSTGRES_DB} POSTGRES_USER: ${POSTGRES_USER} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} ports: ["5432:5432"] volumes: ["pgdata:/var/lib/postgresql/data"] healthcheck: test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"] interval: 5s timeout: 5s retries: 20 redis: image: redis:7 ports: ["6379:6379"] minio: image: minio/minio:RELEASE.2024-10-13T13-34-11Z command: server /data --console-address ":9001" environment: MINIO_ROOT_USER: ${S3_ACCESS_KEY} MINIO_ROOT_PASSWORD: ${S3_SECRET_KEY} ports: ["9000:9000", "9001:9001"] volumes: ["miniodata:/data"] createbuckets: image: minio/mc depends_on: [minio] entrypoint: > /bin/sh -c " mc alias set local ${S3_ENDPOINT} ${S3_ACCESS_KEY} ${S3_SECRET_KEY}; mc mb -p local/${S3_BUCKET} || true; mc anonymous set download local/${S3_BUCKET} || true; exit 0; " api: build: ./apps/api env_file: .env depends_on: postgres: condition: service_healthy redis: condition: service_started createbuckets: condition: service_completed_successfully ports: ["8080:8080"] volumes: - ./apps/api:/app command: > sh -c " alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 8080 --reload " worker: build: ./workers/runner env_file: .env depends_on: redis: condition: service_started api: condition: service_started volumes: - ./workers/runner:/worker - models:/models environment: NVIDIA_VISIBLE_DEVICES: all command: > sh -c " celery -A worker.celery_app worker --loglevel=INFO --concurrency=1 " web: build: ./web env_file: .env depends_on: [api] ports: ["3000:3000"] volumes: - ./web:/web - /web/node_modules command: > sh -c "npm install && npm run dev -- -p 3000" volumes: pgdata: miniodata: models: makefile Copier le code # Makefile up: docker compose up --build down: docker compose down -v logs: docker compose logs -f --tail=200 dockerfile Copier le code # apps/api/Dockerfile FROM python:3.11-slim WORKDIR /app RUN apt-get update && apt-get install -y --no-install-recommends gcc curl && rm -rf /var/lib/apt/lists/* COPY requirements.txt . RUN pip install --no-cache-dir -r requirements.txt COPY . . EXPOSE 8080 txt Copier le code # apps/api/requirements.txt fastapi==0.115.5 uvicorn[standard]==0.30.6 pydantic==2.9.2 pydantic-settings==2.5.2 python-jose==3.3.0 passlib[bcrypt]==1.7.4 SQLAlchemy==2.0.36 psycopg2-binary==2.9.10 alembic==1.13.3 redis==5.1.1 celery==5.4.0 httpx==0.27.2 python-multipart==0.0.12 boto3==1.35.45 botocore==1.35.45 tenacity==9.0.0 slowapi==0.1.9 orjson==3.10.10 python Copier le code # apps/api/app/core/config.py from pydantic_settings import BaseSettings class Settings(BaseSettings): APP_NAME: str = "ZoneAI" ENV: str = "dev" JWT_SECRET: str JWT_EXPIRES_MIN: int = 1440 POSTGRES_HOST: str POSTGRES_PORT: int = 5432 POSTGRES_DB: str POSTGRES_USER: str POSTGRES_PASSWORD: str REDIS_URL: str S3_ENDPOINT: str S3_ACCESS_KEY: str S3_SECRET_KEY: str S3_BUCKET: str S3_REGION: str = "us-east-1" S3_PUBLIC_BASE: str LLM_PROVIDER: str = "stub" OPENAI_API_KEY: str | None = None OPENAI_BASE_URL: str = "https://api.openai.com/v1" OPENAI_MODEL: str = "gpt-4o-mini" IMAGE_MODEL_ID: str = "stabilityai/stable-diffusion-xl-base-1.0" VIDEO_MODEL_ID: str = "stabilityai/stable-video-diffusion-img2vid-xt" RATE_LIMIT_PER_MIN: int = 120 MAX_UPLOAD_MB: int = 50 class Config: env_file = ".env" extra = "ignore" settings = Settings() python Copier le code # apps/api/app/core/security.py from datetime import datetime, timedelta, timezone from jose import jwt from passlib.context import CryptContext from .config import settings pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") ALGO = "HS256" def hash_password(pw: str) -> str: return pwd_context.hash(pw) def verify_password(pw: str, hashed: str) -> bool: return pwd_context.verify(pw, hashed) def create_access_token(sub: str) -> str: exp = datetime.now(timezone.utc) + timedelta(minutes=settings.JWT_EXPIRES_MIN) payload = {"sub": sub, "exp": exp} return jwt.encode(payload, settings.JWT_SECRET, algorithm=ALGO) def decode_token(token: str) -> dict: return jwt.decode(token, settings.JWT_SECRET, algorithms=[ALGO]) python Copier le code # apps/api/app/core/errors.py from fastapi import HTTPException def http_error(status: int, msg: str): raise HTTPException(status_code=status, detail=msg) python Copier le code # apps/api/app/core/logging.py import logging def setup_logging(): logging.basicConfig( level=logging.INFO, format="%(asctime)s %(levelname)s %(name)s - %(message)s", ) python Copier le code # apps/api/app/core/rate_limit.py from slowapi import Limiter from slowapi.util import get_remote_address from .config import settings limiter = Limiter(key_func=get_remote_address, default_limits=[f"{settings.RATE_LIMIT_PER_MIN}/minute"]) python Copier le code # apps/api/app/db/session.py from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker, DeclarativeBase from app.core.config import settings DATABASE_URL = ( f"postgresql+psycopg2://{settings.POSTGRES_USER}:{settings.POSTGRES_PASSWORD}" f"@{settings.POSTGRES_HOST}:{settings.POSTGRES_PORT}/{settings.POSTGRES_DB}" ) engine = create_engine(DATABASE_URL, pool_pre_ping=True) SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) class Base(DeclarativeBase): pass python Copier le code # apps/api/app/db/models.py from sqlalchemy import String, Text, DateTime, ForeignKey, Integer from sqlalchemy.orm import Mapped, mapped_column, relationship from datetime import datetime, timezone from .session import Base def now_utc(): return datetime.now(timezone.utc) class User(Base): __tablename__ = "users" id: Mapped[int] = mapped_column(Integer, primary_key=True) email: Mapped[str] = mapped_column(String(255), unique=True, index=True) password_hash: Mapped[str] = mapped_column(String(255)) created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=now_utc) class Asset(Base): __tablename__ = "assets" id: Mapped[int] = mapped_column(Integer, primary_key=True) owner_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE")) kind: Mapped[str] = mapped_column(String(32)) # image|video|audio|file mime: Mapped[str] = mapped_column(String(128)) s3_key: Mapped[str] = mapped_column(String(512), unique=True) public_url: Mapped[str] = mapped_column(Text) created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=now_utc) owner = relationship("User") class Job(Base): __tablename__ = "jobs" id: Mapped[int] = mapped_column(Integer, primary_key=True) owner_id: Mapped[int] = mapped_column(ForeignKey("users.id", ondelete="CASCADE")) type: Mapped[str] = mapped_column(String(32)) # chat|image|video status: Mapped[str] = mapped_column(String(32), default="queued") # queued|running|done|error prompt: Mapped[str] = mapped_column(Text) params_json: Mapped[str] = mapped_column(Text, default="{}") result_asset_id: Mapped[int | None] = mapped_column(ForeignKey("assets.id", ondelete="SET NULL"), nullable=True) error: Mapped[str | None] = mapped_column(Text, nullable=True) created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=now_utc) updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=now_utc) owner = relationship("User") result_asset = relationship("Asset") python Copier le code # apps/api/app/db/crud.py from sqlalchemy.orm import Session from .models import User, Job, Asset from app.core.security import hash_password def get_user_by_email(db: Session, email: str) -> User | None: return db.query(User).filter(User.email == email).first() def create_user(db: Session, email: str, password: str) -> User: u = User(email=email, password_hash=hash_password(password)) db.add(u) db.commit() db.refresh(u) return u def create_job(db: Session, owner_id: int, type_: str, prompt: str, params_json: str) -> Job: j = Job(owner_id=owner_id, type=type_, prompt=prompt, params_json=params_json, status="queued") db.add(j) db.commit() db.refresh(j) return j def set_job_status(db: Session, job_id: int, status: str, error: str | None = None, asset_id: int | None = None): j = db.query(Job).filter(Job.id == job_id).first() if not j: return j.status = status j.error = error j.result_asset_id = asset_id db.add(j) db.commit() db.refresh(j) def create_asset(db: Session, owner_id: int, kind: str, mime: str, s3_key: str, public_url: str) -> Asset: a = Asset(owner_id=owner_id, kind=kind, mime=mime, s3_key=s3_key, public_url=public_url) db.add(a) db.commit() db.refresh(a) return a python Copier le code # apps/api/app/api/deps.py from fastapi import Depends, Header from sqlalchemy.orm import Session from app.db.session import SessionLocal from app.core.security import decode_token from app.core.errors import http_error from app.db.models import User from app.db.crud import get_user_by_email def get_db(): db = SessionLocal() try: yield db finally: db.close() def get_current_user( db: Session = Depends(get_db), authorization: str | None = Header(default=None), ) -> User: if not authorization or not authorization.startswith("Bearer "): http_error(401, "Missing token") token = authorization.split(" ", 1)[1].strip() try: payload = decode_token(token) email = payload.get("sub") if not email: http_error(401, "Invalid token") u = get_user_by_email(db, email) if not u: http_error(401, "User not found") return u except Exception: http_error(401, "Invalid token") python Copier le code # apps/api/app/schemas/auth.py from pydantic import BaseModel, EmailStr class AuthRegisterIn(BaseModel): email: EmailStr password: str class AuthLoginIn(BaseModel): email: EmailStr password: str class AuthOut(BaseModel): token: str class MeOut(BaseModel): id: int email: EmailStr python Copier le code # apps/api/app/schemas/jobs.py from pydantic import BaseModel from typing import Any class JobCreateIn(BaseModel): type: str # chat|image|video prompt: str params: dict[str, Any] = {} class JobOut(BaseModel): id: int type: str status: str prompt: str params_json: str result_asset_id: int | None error: str | None python Copier le code # apps/api/app/schemas/assets.py from pydantic import BaseModel class AssetOut(BaseModel): id: int kind: str mime: str public_url: str python Copier le code # apps/api/app/services/storage.py import boto3 from botocore.config import Config from app.core.config import settings s3 = boto3.client( "s3", endpoint_url=settings.S3_ENDPOINT, aws_access_key_id=settings.S3_ACCESS_KEY, aws_secret_access_key=settings.S3_SECRET_KEY, region_name=settings.S3_REGION, config=Config(s3={"addressing_style": "path"}), ) def put_bytes(key: str, data: bytes, mime: str) -> str: s3.put_object(Bucket=settings.S3_BUCKET, Key=key, Body=data, ContentType=mime) return f"{settings.S3_PUBLIC_BASE}/{key}" python Copier le code # apps/api/app/services/safety.py def basic_prompt_guard(prompt: str) -> None: blocked = ["csam", "sexual content involving minors"] low = prompt.lower() for b in blocked: if b in low: raise ValueError("Blocked prompt") python Copier le code # apps/api/app/services/llm.py from app.core.config import settings import httpx class LLM: async def complete(self, prompt: str) -> str: if settings.LLM_PROVIDER == "openai": return await self._openai(prompt) return f"STUB: {prompt}" async def _openai(self, prompt: str) -> str: if not settings.OPENAI_API_KEY: return "OPENAI_API_KEY missing" headers = {"Authorization": f"Bearer {settings.OPENAI_API_KEY}"} payload = { "model": settings.OPENAI_MODEL, "messages": [{"role": "user", "content": prompt}], } async with httpx.AsyncClient(base_url=settings.OPENAI_BASE_URL, timeout=60) as client: r = await client.post("/chat/completions", headers=headers, json=payload) r.raise_for_status() data = r.json() return data["choices"][0]["message"]["content"] llm = LLM() python Copier le code # apps/api/app/services/images.py # Placeholder: generation is done in worker; API only enqueues jobs. python Copier le code # apps/api/app/services/videos.py # Placeholder: generation is done in worker; API only enqueues jobs. python Copier le code # apps/api/app/api/routes_auth.py from fastapi import APIRouter, Depends from sqlalchemy.orm import Session from app.api.deps import get_db, get_current_user from app.schemas.auth import AuthRegisterIn, AuthLoginIn, AuthOut, MeOut from app.db.crud import get_user_by_email, create_user from app.core.security import verify_password, create_access_token from app.core.errors import http_error router = APIRouter(prefix="/auth", tags=["auth"]) @router.post("/register", response_model=AuthOut) def register(body: AuthRegisterIn, db: Session = Depends(get_db)): if get_user_by_email(db, body.email): http_error(409, "Email already exists") create_user(db, body.email, body.password) token = create_access_token(body.email) return {"token": token} @router.post("/login", response_model=AuthOut) def login(body: AuthLoginIn, db: Session = Depends(get_db)): u = get_user_by_email(db, body.email) if not u or not verify_password(body.password, u.password_hash): http_error(401, "Bad credentials") token = create_access_token(u.email) return {"token": token} @router.get("/me", response_model=MeOut) def me(user=Depends(get_current_user)): return {"id": user.id, "email": user.email} python Copier le code # apps/api/app/api/routes_jobs.py import json from fastapi import APIRouter, Depends from sqlalchemy.orm import Session from app.api.deps import get_db, get_current_user from app.schemas.jobs import JobCreateIn, JobOut from app.db.crud import create_job from app.core.errors import http_error from app.core.rate_limit import limiter from fastapi import Request from app.core.config import settings from celery import Celery celery_app = Celery("zoneai", broker=settings.REDIS_URL, backend=settings.REDIS_URL) router = APIRouter(prefix="/jobs", tags=["jobs"]) @router.post("", response_model=JobOut) @limiter.limit("60/minute") def create_job_route(request: Request, body: JobCreateIn, db: Session = Depends(get_db), user=Depends(get_current_user)): if body.type not in ("chat", "image", "video"): http_error(400, "Invalid job type") params_json = json.dumps(body.params or {}, ensure_ascii=False) job = create_job(db, owner_id=user.id, type_=body.type, prompt=body.prompt, params_json=params_json) celery_app.send_task("tasks.run_job", args=[job.id]) return JobOut( id=job.id, type=job.type, status=job.status, prompt=job.prompt, params_json=job.params_json, result_asset_id=job.result_asset_id, error=job.error, ) @router.get("/{job_id}", response_model=JobOut) def get_job(job_id: int, db: Session = Depends(get_db), user=Depends(get_current_user)): from app.db.models import Job job = db.query(Job).filter(Job.id == job_id, Job.owner_id == user.id).first() if not job: http_error(404, "Not found") return JobOut( id=job.id, type=job.type, status=job.status, prompt=job.prompt, params_json=job.params_json, result_asset_id=job.result_asset_id, error=job.error, ) python Copier le code # apps/api/app/api/routes_assets.py from fastapi import APIRouter, Depends from sqlalchemy.orm import Session from app.api.deps import get_db, get_current_user from app.schemas.assets import AssetOut from app.core.errors import http_error router = APIRouter(prefix="/assets", tags=["assets"]) @router.get("/{asset_id}", response_model=AssetOut) def get_asset(asset_id: int, db: Session = Depends(get_db), user=Depends(get_current_user)): from app.db.models import Asset a = db.query(Asset).filter(Asset.id == asset_id, Asset.owner_id == user.id).first() if not a: http_error(404, "Not found") return AssetOut(id=a.id, kind=a.kind, mime=a.mime, public_url=a.public_url) python Copier le code # apps/api/app/api/routes_chat.py from fastapi import APIRouter, Depends from sqlalchemy.orm import Session from app.api.deps import get_db, get_current_user from app.services.llm import llm from app.schemas.jobs import JobCreateIn from app.db.crud import create_job, set_job_status import json router = APIRouter(prefix="/chat", tags=["chat"]) @router.post("") async def chat(body: JobCreateIn, db: Session = Depends(get_db), user=Depends(get_current_user)): job = create_job(db, owner_id=user.id, type_="chat", prompt=body.prompt, params_json=json.dumps(body.params or {})) try: set_job_status(db, job.id, "running") text = await llm.complete(body.prompt) set_job_status(db, job.id, "done") return {"job_id": job.id, "text": text} except Exception as e: set_job_status(db, job.id, "error", error=str(e)) return {"job_id": job.id, "error": str(e)} python Copier le code # apps/api/app/api/routes_health.py from fastapi import APIRouter router = APIRouter(tags=["health"]) @router.get("/health") def health(): return {"ok": True} python Copier le code # apps/api/app/main.py from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware from slowapi.errors import RateLimitExceeded from slowapi import _rate_limit_exceeded_handler from app.core.logging import setup_logging from app.core.rate_limit import limiter from app.api.routes_health import router as health_router from app.api.routes_auth import router as auth_router from app.api.routes_jobs import router as jobs_router from app.api.routes_assets import router as assets_router from app.api.routes_chat import router as chat_router setup_logging() app = FastAPI(title="ZoneAI API") app.state.limiter = limiter app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) app.add_middleware( CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"], ) app.include_router(health_router) app.include_router(auth_router) app.include_router(chat_router) app.include_router(jobs_router) app.include_router(assets_router) ini Copier le code # apps/api/alembic.ini [alembic] script_location = alembic sqlalchemy.url = driver://user:pass@localhost/dbname [loggers] keys = root,sqlalchemy,alembic [handlers] keys = console [formatters] keys = generic [logger_root] level = INFO handlers = console [logger_sqlalchemy] level = WARN handlers = qualname = sqlalchemy.engine [logger_alembic] level = INFO handlers = qualname = alembic [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic [formatter_generic] format = %(levelname)-5.5s [%(name)s] %(message)s python Copier le code # apps/api/alembic/env.py from logging.config import fileConfig from sqlalchemy import engine_from_config, pool from alembic import context import os from app.db.session import Base from app.db import models # noqa config = context.config fileConfig(config.config_file_name) def get_url(): host = os.environ["POSTGRES_HOST"] port = os.environ.get("POSTGRES_PORT", "5432") db = os.environ["POSTGRES_DB"] user = os.environ["POSTGRES_USER"] pw = os.environ["POSTGRES_PASSWORD"] return f"postgresql+psycopg2://{user}:{pw}@{host}:{port}/{db}" target_metadata = Base.metadata def run_migrations_offline(): context.configure(url=get_url(), target_metadata=target_metadata, literal_binds=True) with context.begin_transaction(): context.run_migrations() def run_migrations_online(): connectable = engine_from_config( {"sqlalchemy.url": get_url()}, prefix="sqlalchemy.", poolclass=pool.NullPool, ) with connectable.connect() as connection: context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online() python Copier le code # apps/api/alembic/versions/0001_init.py from alembic import op import sqlalchemy as sa revision = "0001_init" down_revision = None branch_labels = None depends_on = None def upgrade(): op.create_table( "users", sa.Column("id", sa.Integer(), primary_key=True), sa.Column("email", sa.String(length=255), nullable=False), sa.Column("password_hash", sa.String(length=255), nullable=False), sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), ) op.create_index("ix_users_email", "users", ["email"], unique=True) op.create_table( "assets", sa.Column("id", sa.Integer(), primary_key=True), sa.Column("owner_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False), sa.Column("kind", sa.String(length=32), nullable=False), sa.Column("mime", sa.String(length=128), nullable=False), sa.Column("s3_key", sa.String(length=512), nullable=False), sa.Column("public_url", sa.Text(), nullable=False), sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), ) op.create_index("ix_assets_s3_key", "assets", ["s3_key"], unique=True) op.create_table( "jobs", sa.Column("id", sa.Integer(), primary_key=True), sa.Column("owner_id", sa.Integer(), sa.ForeignKey("users.id", ondelete="CASCADE"), nullable=False), sa.Column("type", sa.String(length=32), nullable=False), sa.Column("status", sa.String(length=32), nullable=False), sa.Column("prompt", sa.Text(), nullable=False), sa.Column("params_json", sa.Text(), nullable=False), sa.Column("result_asset_id", sa.Integer(), sa.ForeignKey("assets.id", ondelete="SET NULL")), sa.Column("error", sa.Text()), sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False), ) def downgrade(): op.drop_table("jobs") op.drop_index("ix_assets_s3_key", table_name="assets") op.drop_table("assets") op.drop_index("ix_users_email", table_name="users") op.drop_table("users") dockerfile Copier le code # workers/runner/Dockerfile FROM python:3.11-slim WORKDIR /worker RUN apt-get update && apt-get install -y --no-install-recommends git ffmpeg libgl1 libglib2.0-0 && rm -rf /var/lib/apt/lists/* COPY requirements.txt . RUN pip install --no-cache-dir -r requirements.txt COPY . . txt Copier le code # workers/runner/requirements.txt celery==5.4.0 redis==5.1.1 SQLAlchemy==2.0.36 psycopg2-binary==2.9.10 boto3==1.35.45 botocore==1.35.45 orjson==3.10.10 torch==2.4.1 torchvision==0.19.1 diffusers==0.31.0 transformers==4.45.2 accelerate==0.35.0 safetensors==0.4.5 opencv-python==4.10.0.84 imageio==2.36.0 imageio-ffmpeg==0.5.1 pillow==10.4.0 python Copier le code # workers/runner/worker.py from celery import Celery import os REDIS_URL = os.environ["REDIS_URL"] celery_app = Celery("worker", broker=REDIS_URL, backend=REDIS_URL) celery_app.autodiscover_tasks(["tasks"]) python Copier le code # workers/runner/pipelines/util.py import os import boto3 from botocore.config import Config def s3_client(): endpoint = os.environ["S3_ENDPOINT"] access = os.environ["S3_ACCESS_KEY"] secret = os.environ["S3_SECRET_KEY"] region = os.environ.get("S3_REGION", "us-east-1") return boto3.client( "s3", endpoint_url=endpoint, aws_access_key_id=access, aws_secret_access_key=secret, region_name=region, config=Config(s3={"addressing_style": "path"}), ) def put_bytes(key: str, data: bytes, mime: str) -> str: bucket = os.environ["S3_BUCKET"] public_base = os.environ["S3_PUBLIC_BASE"] s3 = s3_client() s3.put_object(Bucket=bucket, Key=key, Body=data, ContentType=mime) return f"{public_base}/{key}" python Copier le code # workers/runner/pipelines/image_sdxl.py import io, os, uuid from PIL import Image import torch from diffusers import StableDiffusionXLPipeline from .util import put_bytes _pipe = None def get_pipe(): global _pipe if _pipe is None: model_id = os.environ.get("IMAGE_MODEL_ID", "stabilityai/stable-diffusion-xl-base-1.0") _pipe = StableDiffusionXLPipeline.from_pretrained( model_id, torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32, variant="fp16" if torch.cuda.is_available() else None, use_safetensors=True, ) if torch.cuda.is_available(): _pipe = _pipe.to("cuda") _pipe.enable_attention_slicing() return _pipe def generate_image(prompt: str, negative: str = "", width: int = 1024, height: int = 1024, steps: int = 30, guidance: float = 6.5, seed: int | None = None): pipe = get_pipe() gen = None if seed is not None: device = "cuda" if torch.cuda.is_available() else "cpu" gen = torch.Generator(device=device).manual_seed(int(seed)) out = pipe( prompt=prompt, negative_prompt=negative or None, width=int(width), height=int(height), num_inference_steps=int(steps), guidance_scale=float(guidance), generator=gen, ) img: Image.Image = out.images[0] buf = io.BytesIO() img.save(buf, format="PNG") data = buf.getvalue() key = f"images/{uuid.uuid4().hex}.png" url = put_bytes(key, data, "image/png") return key, url python Copier le code # workers/runner/pipelines/video_svd.py import io, os, uuid from PIL import Image import numpy as np import torch import imageio.v3 as iio from diffusers import StableVideoDiffusionPipeline from .util import put_bytes _pipe = None def get_pipe(): global _pipe if _pipe is None: model_id = os.environ.get("VIDEO_MODEL_ID", "stabilityai/stable-video-diffusion-img2vid-xt") _pipe = StableVideoDiffusionPipeline.from_pretrained( model_id, torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32, variant="fp16" if torch.cuda.is_available() else None, ) if torch.cuda.is_available(): _pipe = _pipe.to("cuda") _pipe.enable_attention_slicing() return _pipe def generate_video_from_image(pil_image: Image.Image, motion_bucket_id: int = 127, fps: int = 12, frames: int = 25, seed: int | None = None): pipe = get_pipe() image = pil_image.convert("RGB").resize((1024, 576)) gen = None if seed is not None: device = "cuda" if torch.cuda.is_available() else "cpu" gen = torch.Generator(device=device).manual_seed(int(seed)) out = pipe( image=image, motion_bucket_id=int(motion_bucket_id), num_frames=int(frames), generator=gen, ) video_frames = out.frames[0] arr = [np.array(f) for f in video_frames] buf = io.BytesIO() iio.imwrite(buf, arr, extension=".mp4", fps=int(fps)) data = buf.getvalue() key = f"videos/{uuid.uuid4().hex}.mp4" url = put_bytes(key, data, "video/mp4") return key, url python Copier le code # workers/runner/tasks.py import os, json from celery import shared_task from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from datetime import datetime, timezone from pipelines.image_sdxl import generate_image from pipelines.video_svd import generate_video_from_image from PIL import Image import requests DATABASE_URL = ( f"postgresql+psycopg2://{os.environ['POSTGRES_USER']}:{os.environ['POSTGRES_PASSWORD']}" f"@{os.environ['POSTGRES_HOST']}:{os.environ.get('POSTGRES_PORT','5432')}/{os.environ['POSTGRES_DB']}" ) engine = create_engine(DATABASE_URL, pool_pre_ping=True) SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) def now_utc(): return datetime.now(timezone.utc) @shared_task(name="tasks.run_job") def run_job(job_id: int): db = SessionLocal() try: job = db.execute( "SELECT id, owner_id, type, prompt, params_json FROM jobs WHERE id=:id", {"id": job_id}, ).mappings().first() if not job: return db.execute("UPDATE jobs SET status='running', updated_at=:u WHERE id=:id", {"id": job_id, "u": now_utc()}) db.commit() params = json.loads(job["params_json"] or "{}") if job["type"] == "image": key, url = generate_image( prompt=job["prompt"], negative=params.get("negative", ""), width=params.get("width", 1024), height=params.get("height", 1024), steps=params.get("steps", 30), guidance=params.get("guidance", 6.5), seed=params.get("seed"), ) asset_id = _insert_asset(db, job["owner_id"], "image", "image/png", key, url) db.execute("UPDATE jobs SET status='done', result_asset_id=:a, updated_at=:u WHERE id=:id", {"a": asset_id, "u": now_utc(), "id": job_id}) db.commit() return if job["type"] == "video": # expects params.image_url OR params.image_asset_url img_url = params.get("image_url") or params.get("image_asset_url") if not img_url: raise RuntimeError("video job requires params.image_url") img_bytes = requests.get(img_url, timeout=60).content pil = Image.open(io.BytesIO(img_bytes)).convert("RGB") key, url = generate_video_from_image( pil_image=pil, motion_bucket_id=params.get("motion_bucket_id", 127), fps=params.get("fps", 12), frames=params.get("frames", 25), seed=params.get("seed"), ) asset_id = _insert_asset(db, job["owner_id"], "video", "video/mp4", key, url) db.execute("UPDATE jobs SET status='done', result_asset_id=:a, updated_at=:u WHERE id=:id", {"a": asset_id, "u": now_utc(), "id": job_id}) db.commit() return if job["type"] == "chat": db.execute("UPDATE jobs SET status='done', updated_at=:u WHERE id=:id", {"u": now_utc(), "id": job_id}) db.commit() return raise RuntimeError("Unknown job type") except Exception as e: db.execute("UPDATE jobs SET status='error', error=:err, updated_at=:u WHERE id=:id", {"err": str(e), "u": now_utc(), "id": job_id}) db.commit() finally: db.close() def _insert_asset(db, owner_id: int, kind: str, mime: str, s3_key: str, public_url: str) -> int: row = db.execute( "INSERT INTO assets (owner_id, kind, mime, s3_key, public_url, created_at) VALUES (:o,:k,:m,:s,:p,:c) RETURNING id", {"o": owner_id, "k": kind, "m": mime, "s": s3_key, "p": public_url, "c": now_utc()}, ).first() db.commit() return int(row[0]) dockerfile Copier le code # web/Dockerfile FROM node:20-alpine WORKDIR /web COPY package.json package-lock.json* ./ RUN npm install COPY . . EXPOSE 3000 json Copier le code // web/package.json { "name": "zone-ai-web", "private": true, "version": "1.0.0", "scripts": { "dev": "next dev", "build": "next build", "start": "next start" }, "dependencies": { "next": "15.0.3", "react": "19.0.0", "react-dom": "19.0.0" }, "devDependencies": { "typescript": "5.6.3", "tailwindcss": "3.4.14", "postcss": "8.4.47", "autoprefixer": "10.4.20" } } js Copier le code // web/next.config.js module.exports = { reactStrictMode: true }; js Copier le code // web/tailwind.config.js module.exports = { content: ["./src/**/*.{ts,tsx}"], theme: { extend: {} }, plugins: [] }; js Copier le code // web/postcss.config.js module.exports = { plugins: { tailwindcss: {}, autoprefixer: {} } }; json Copier le code // web/tsconfig.json { "compilerOptions": { "target": "ES2022", "lib": ["dom", "dom.iterable", "es2022"], "allowJs": false, "skipLibCheck": true, "strict": true, "noEmit": true, "module": "esnext", "moduleResolution": "bundler", "resolveJsonModule": true, "isolatedModules": true, "jsx": "preserve", "types": ["node"] }, "include": ["next-env.d.ts", "src/**/*.ts", "src/**/*.tsx"] } css Copier le code /* web/src/styles/globals.css */ @tailwind base; @tailwind components; @tailwind utilities; html, body { height: 100%; } ts Copier le code // web/src/lib/types.ts export type JobType = "chat" | "image" | "video"; export type JobStatus = "queued" | "running" | "done" | "error"; export type Job = { id: number; type: JobType; status: JobStatus; prompt: string; params_json: string; result_asset_id: number | null; error: string | null; }; export type Asset = { id: number; kind: string; mime: string; public_url: string; }; ts Copier le code // web/src/lib/auth.ts export const tokenKey = "zoneai_token"; export function getToken(): string | null { if (typeof window === "undefined") return null; return localStorage.getItem(tokenKey); } export function setToken(token: string) { localStorage.setItem(tokenKey, token); } export function clearToken() { localStorage.removeItem(tokenKey); } ts Copier le code // web/src/lib/api.ts import { getToken } from "./auth"; import type { Job, Asset, JobType } from "./types"; const API = process.env.NEXT_PUBLIC_API_BASE || "http://localhost:8080"; async function req(path: string, opts: RequestInit = {}) { const token = getToken(); const headers: any = { ...(opts.headers || {}) }; if (token) headers["Authorization"] = `Bearer ${token}`; if (!headers["Content-Type"] && opts.body) headers["Content-Type"] = "application/json"; const r = await fetch(`${API}${path}`, { ...opts, headers }); const data = await r.json().catch(() => ({})); if (!r.ok) throw new Error(data?.detail || "Request failed"); return data; } export async function register(email: string, password: string) { return req("/auth/register", { method: "POST", body: JSON.stringify({ email, password }) }); } export async function login(email: string, password: string) { return req("/auth/login", { method: "POST", body: JSON.stringify({ email, password }) }); } export async function me() { return req("/auth/me", { method: "GET" }); } export async function createJob(type: JobType, prompt: string, params: any = {}) { return req("/jobs", { method: "POST", body: JSON.stringify({ type, prompt, params }) }) as Promise; } export async function getJob(jobId: number) { return req(`/jobs/${jobId}`, { method: "GET" }) as Promise; } export async function getAsset(assetId: number) { return req(`/assets/${assetId}`, { method: "GET" }) as Promise; } export async function chat(prompt: string) { return req("/chat", { method: "POST", body: JSON.stringify({ type: "chat", prompt, params: {} }) }); } tsx Copier le code // web/src/app/layout.tsx import "../styles/globals.css"; import React from "react"; export default function RootLayout({ children }: { children: React.ReactNode }) { return ( {children} ); } tsx Copier le code // web/src/app/page.tsx "use client"; import React, { useEffect, useMemo, useState } from "react"; import { clearToken, getToken, setToken } from "../lib/auth"; import { chat, createJob, getAsset, getJob, login, register } from "../lib/api"; import type { Asset, Job } from "../lib/types"; export default function Page() { const [email, setEmail] = useState("admin@zoneai.local"); const [password, setPassword] = useState("admin1234"); const [authReady, setAuthReady] = useState(false); const [prompt, setPrompt] = useState(""); const [chatLog, setChatLog] = useState<{ role: string; text: string }[]>([]); const [jobs, setJobs] = useState([]); const [asset, setAssetState] = useState(null); const authed = useMemo(() => !!getToken(), [authReady]); useEffect(() => { setAuthReady(true); }, []); async function doRegister() { const r = await register(email, password); setToken(r.token); setAuthReady(x => !x); } async function doLogin() { const r = await login(email, password); setToken(r.token); setAuthReady(x => !x); } function doLogout() { clearToken(); setAuthReady(x => !x); } async function sendChat() { if (!prompt.trim()) return; const p = prompt.trim(); setPrompt(""); setChatLog(l => [...l, { role: "you", text: p }]); const r = await chat(p); if (r?.text) setChatLog(l => [...l, { role: "rosalinda", text: r.text }]); } async function genImage() { const p = prompt.trim(); if (!p) return; const j = await createJob("image", p, { width: 1024, height: 1024, steps: 30, guidance: 6.5 }); setJobs(x => [j, ...x]); pollJob(j.id); } async function genVideoFromLastImage() { if (!asset?.public_url) return; const j = await createJob("video", "video_from_image", { image_url: asset.public_url, fps: 12, frames: 25, motion_bucket_id: 127 }); setJobs(x => [j, ...x]); pollJob(j.id); } async function pollJob(id: number) { for (let i = 0; i < 120; i++) { const j = await getJob(id); setJobs(prev => prev.map(x => (x.id === j.id ? j : x))); if (j.status === "done" && j.result_asset_id) { const a = await getAsset(j.result_asset_id); setAssetState(a); return; } if (j.status === "error") return; await new Promise(r => setTimeout(r, 1000)); } } return (
Chat
{chatLog.map((m, idx) => (
{m.role}
{m.text}
))}
setPrompt(e.target.value)} placeholder="Écris une demande… (image / vidéo / question)" onKeyDown={(e) => { if (e.key === "Enter") sendChat(); }} />
Studio
{asset?.kind === "image" && } {asset?.kind === "video" && (
Jobs
{jobs.map(j => (
#{j.id} {j.type}
{j.status}
{j.error &&
{j.error}
}
))} {jobs.length === 0 &&
Aucun job.
}
); } ts Copier le code // web/src/app/api/auth/login/route.ts import { NextResponse } from "next/server"; export async function POST(req: Request) { const body = await req.json(); const r = await fetch("http://api:8080/auth/login", { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify(body) }); const data = await r.json(); return NextResponse.json(data, { status: r.status }); } ts Copier le code // web/src/app/api/auth/me/route.ts import { NextResponse } from "next/server"; export async function GET(req: Request) { const auth = req.headers.get("authorization") || ""; const r = await fetch("http://api:8080/auth/me", { headers: { authorization: auth } }); const data = await r.json(); return NextResponse.json(data, { status: r.status }); } ts Copier le code // web/src/app/api/chat/route.ts import { NextResponse } from "next/server"; export async function POST(req: Request) { const auth = req.headers.get("authorization") || ""; const body = await req.json(); const r = await fetch("http://api:8080/chat", { method: "POST", headers: { "Content-Type": "application/json", authorization: auth }, body: JSON.stringify(body) }); const data = await r.json(); return NextResponse.json(data, { status: r.status }); } ts Copier le code // web/src/app/api/jobs/route.ts import { NextResponse } from "next/server"; export async function POST(req: Request) { const auth = req.headers.get("authorization") || ""; const body = await req.json(); const r = await fetch("http://api:8080/jobs", { method: "POST", headers: { "Content-Type": "application/json", authorization: auth }, body: JSON.stringify(body) }); const data = await r.json(); return NextResponse.json(data, { status: r.status }); } ts Copier le code // web/src/app/api/assets/route.ts import { NextResponse } from "next/server"; export async function POST() { return NextResponse.json({ ok: true }); } txt Copier le code # NOTE: For production, put a real reverse proxy and set NEXT_PUBLIC_API_BASE to the public API URL.