Initial commit: Leopost Full — merge di Leopost, Post Generator e Autopilot OS
- Backend FastAPI con multi-LLM (Claude/OpenAI/Gemini) - Publishing su Facebook, Instagram, YouTube, TikTok - Calendario editoriale con awareness levels (PAS, AIDA, BAB...) - Design system Editorial Fresh (Fraunces + DM Sans) - Scheduler automatico, gestione commenti AI, affiliate links Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
0
backend/app/__init__.py
Normal file
0
backend/app/__init__.py
Normal file
60
backend/app/auth.py
Normal file
60
backend/app/auth.py
Normal file
@@ -0,0 +1,60 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import bcrypt
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
from jose import JWTError, jwt
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from .config import settings
|
||||
from .database import get_db
|
||||
from .models import User
|
||||
from .schemas import LoginRequest, Token
|
||||
|
||||
# All auth endpoints are served under /api/auth.
router = APIRouter(prefix="/api/auth", tags=["auth"])
# Token URL advertised to clients for the OAuth2 password flow;
# must match the login route below.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/login")
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
    """Hash a plaintext password with bcrypt using a fresh salt."""
    raw = password.encode("utf-8")
    digest = bcrypt.hashpw(raw, bcrypt.gensalt())
    return digest.decode("utf-8")
|
||||
|
||||
|
||||
def verify_password(plain: str, hashed: str) -> bool:
    """Check a plaintext password against a stored bcrypt hash."""
    candidate = plain.encode("utf-8")
    stored = hashed.encode("utf-8")
    return bcrypt.checkpw(candidate, stored)
|
||||
|
||||
|
||||
def create_access_token(data: dict) -> str:
    """Build a signed HS256 JWT carrying *data* plus an ``exp`` claim.

    Expiry is ``access_token_expire_minutes`` from now (settings).
    """
    expiry = datetime.utcnow() + timedelta(
        minutes=settings.access_token_expire_minutes
    )
    claims = {**data, "exp": expiry}
    return jwt.encode(claims, settings.secret_key, algorithm="HS256")
|
||||
|
||||
|
||||
def get_current_user(
    token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)
) -> User:
    """FastAPI dependency: resolve the bearer token to a ``User`` row.

    Raises 401 when the token is malformed, lacks a ``sub`` claim, or
    names a user that no longer exists.
    """
    try:
        payload = jwt.decode(token, settings.secret_key, algorithms=["HS256"])
    except JWTError:
        raise HTTPException(status_code=401, detail="Invalid token")

    username: str = payload.get("sub")
    if username is None:
        raise HTTPException(status_code=401, detail="Invalid token")

    user = db.query(User).filter(User.username == username).first()
    if user is None:
        raise HTTPException(status_code=401, detail="User not found")
    return user
|
||||
|
||||
|
||||
@router.post("/login", response_model=Token)
def login(request: LoginRequest, db: Session = Depends(get_db)):
    """Authenticate with username/password and return a bearer token."""
    user = db.query(User).filter(User.username == request.username).first()
    credentials_ok = user is not None and verify_password(
        request.password, user.hashed_password
    )
    if not credentials_ok:
        raise HTTPException(status_code=401, detail="Invalid credentials")
    return Token(access_token=create_access_token({"sub": user.username}))
|
||||
|
||||
|
||||
@router.get("/me")
def me(user: User = Depends(get_current_user)):
    """Return the authenticated user's public profile."""
    profile = {"username": user.username}
    return profile
|
||||
15
backend/app/config.py
Normal file
15
backend/app/config.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration, overridable via environment or a .env file."""

    # SQLite file under ./data by default; any SQLAlchemy URL works.
    database_url: str = "sqlite:///./data/leopost.db"
    # JWT signing key — MUST be replaced with a random secret in production.
    secret_key: str = "change-me-to-a-random-secret-key"
    # Credentials for the admin account seeded at application startup.
    admin_username: str = "admin"
    admin_password: str = "changeme"
    access_token_expire_minutes: int = 1440  # 24h

    class Config:
        # Values are also read from a local .env file when present.
        env_file = ".env"


# Shared singleton imported throughout the app.
settings = Settings()
|
||||
20
backend/app/database.py
Normal file
20
backend/app/database.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker, declarative_base
|
||||
|
||||
from .config import settings
|
||||
|
||||
# SQLite connections are thread-bound by default; FastAPI may use a session
# from different threads, so disable the check. Other backends need nothing.
connect_args = (
    {"check_same_thread": False}
    if settings.database_url.startswith("sqlite")
    else {}
)

engine = create_engine(settings.database_url, connect_args=connect_args)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
|
||||
|
||||
|
||||
def get_db():
    """FastAPI dependency: yield a DB session, always closing it afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
||||
156
backend/app/main.py
Normal file
156
backend/app/main.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""Leopost Full — FastAPI application.
|
||||
|
||||
IMPORTANT: root_path is intentionally NOT set in the FastAPI() constructor.
|
||||
It is passed only via Uvicorn's --root-path flag at runtime.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
from pathlib import Path
|
||||
from threading import Thread
|
||||
from time import sleep
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from .auth import hash_password
|
||||
from .auth import router as auth_router
|
||||
from .config import settings
|
||||
from .database import Base, SessionLocal, engine
|
||||
from .models import User
|
||||
from .routers.affiliates import router as affiliates_router
|
||||
from .routers.characters import router as characters_router
|
||||
from .routers.comments import router as comments_router
|
||||
from .routers.content import router as content_router
|
||||
from .routers.editorial import router as editorial_router
|
||||
from .routers.plans import router as plans_router
|
||||
from .routers.settings import router as settings_router
|
||||
from .routers.social import router as social_router
|
||||
|
||||
logger = logging.getLogger("leopost")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# SPA static files with catch-all fallback
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class SPAStaticFiles(StaticFiles):
    """Serve a React SPA: return index.html for missing paths so the
    client-side router handles unknown routes instead of a server 404."""

    async def get_response(self, path: str, scope):
        # Local import keeps the module's import block untouched; Starlette's
        # StaticFiles signals a missing file by raising HTTPException(404).
        from starlette.exceptions import HTTPException

        try:
            return await super().get_response(path, scope)
        except HTTPException as exc:
            # Fall back to the SPA entry point only for genuine 404s.
            # (The previous broad `except Exception` also swallowed real
            # server-side failures such as permission errors.)
            if exc.status_code == 404:
                return await super().get_response("index.html", scope)
            raise
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Background scheduler
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Flag polled by the scheduler thread; cleared in lifespan() on shutdown.
_scheduler_running = False


def _scheduler_loop():
    """Check for due posts every 60 seconds and publish them.

    Runs on a daemon thread started in ``lifespan``; exits (within one
    sleep interval) once ``_scheduler_running`` is cleared.
    """
    from .scheduler import check_and_publish

    global _scheduler_running
    while _scheduler_running:
        try:
            check_and_publish()
        except Exception:
            # Keep the scheduler alive on any failure; logger.exception
            # records the full traceback (the previous f-string error log
            # discarded it, making failures hard to diagnose).
            logger.exception("Scheduler error")
        sleep(60)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Lifespan
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application startup/shutdown hook.

    Startup: create the data directory and DB schema, seed the admin user
    from settings if missing, and launch the background scheduler thread.
    Shutdown: clear the flag so the scheduler loop exits.
    """
    global _scheduler_running

    # Ensure data directory exists (SQLite file lives under ./data)
    data_dir = Path("./data")
    data_dir.mkdir(parents=True, exist_ok=True)

    # Create tables (idempotent — only missing tables are created)
    Base.metadata.create_all(bind=engine)

    # Create admin user if not exists
    db = SessionLocal()
    try:
        existing = db.query(User).filter(User.username == settings.admin_username).first()
        if not existing:
            admin = User(
                username=settings.admin_username,
                hashed_password=hash_password(settings.admin_password),
            )
            db.add(admin)
            db.commit()
    finally:
        db.close()

    # Start background scheduler on a daemon thread so it never blocks exit
    _scheduler_running = True
    scheduler_thread = Thread(target=_scheduler_loop, daemon=True)
    scheduler_thread.start()
    logger.info("Background scheduler started")

    yield

    # Shutdown: the scheduler thread notices the cleared flag within ~60s
    _scheduler_running = False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Application
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# CRITICAL: Do NOT pass root_path here — use Uvicorn --root-path instead.
|
||||
# NOTE: root_path is intentionally not passed here (see module docstring);
# it is supplied via Uvicorn's --root-path flag at runtime.
app = FastAPI(
    title="Leopost Full",
    version="0.1.0",
    lifespan=lifespan,
)

# CORS: allow the Vite dev server origin. In production the SPA is served
# from the same origin (see the static mount below), so no entry is needed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:5173"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# API routes (must be registered BEFORE the SPA catch-all mount)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Register every API router. Order among these does not matter, but all of
# them must be registered BEFORE the SPA catch-all mount at the bottom of
# this module, or "/" would shadow the API routes.
app.include_router(auth_router)
app.include_router(characters_router)
app.include_router(content_router)
app.include_router(affiliates_router)
app.include_router(plans_router)
app.include_router(social_router)
app.include_router(comments_router)
app.include_router(settings_router)
app.include_router(editorial_router)
|
||||
|
||||
|
||||
@app.get("/api/health")
def health():
    """Liveness probe: report service status and version."""
    return dict(status="ok", version="0.1.0")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# SPA catch-all mount (MUST be last)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# The built frontend bundle lives at <repo root>/static, three levels up
# from this file (backend/app/main.py).
_STATIC_DIR = Path(__file__).parent.parent.parent / "static"

# Mount only when the bundle exists so API-only deployments still boot.
# This catch-all "/" mount MUST remain the last route registered.
if _STATIC_DIR.exists():
    app.mount("/", SPAStaticFiles(directory=str(_STATIC_DIR), html=True), name="spa")
|
||||
156
backend/app/models.py
Normal file
156
backend/app/models.py
Normal file
@@ -0,0 +1,156 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, JSON, String, Text
|
||||
|
||||
from .database import Base
|
||||
|
||||
|
||||
# === Phase 1: Core ===
|
||||
|
||||
class User(Base):
    """Login account for the dashboard (seeded admin user at startup)."""

    __tablename__ = "users"

    id = Column(Integer, primary_key=True, index=True)
    username = Column(String(50), unique=True, nullable=False)
    hashed_password = Column(String(255), nullable=False)  # bcrypt hash, never plaintext
    created_at = Column(DateTime, default=datetime.utcnow)
|
||||
|
||||
|
||||
class Character(Base):
    """A content-creator persona: owns posts, social accounts, and plans."""

    __tablename__ = "characters"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String(100), nullable=False)
    niche = Column(String(200), nullable=False)
    topics = Column(JSON, default=list)           # list of topic strings
    tone = Column(Text)                           # free-text tone/voice description
    visual_style = Column(JSON, default=dict)
    social_accounts = Column(JSON, default=dict)
    affiliate_links = Column(JSON, default=list)
    avatar_url = Column(String(500))
    is_active = Column(Boolean, default=True)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
||||
|
||||
|
||||
# === Phase 2: Content Generation ===
|
||||
|
||||
class Post(Base):
    """A generated piece of content, tracked from draft through publication."""

    __tablename__ = "posts"

    id = Column(Integer, primary_key=True, index=True)
    character_id = Column(Integer, ForeignKey("characters.id"), nullable=False)
    content_type = Column(String(20), default="text")  # text, image, video, carousel
    text_content = Column(Text)
    hashtags = Column(JSON, default=list)
    image_url = Column(String(500))
    video_url = Column(String(500))
    media_urls = Column(JSON, default=list)
    affiliate_links_used = Column(JSON, default=list)
    llm_provider = Column(String(50))   # which LLM backend produced the text
    llm_model = Column(String(100))
    platform_hint = Column(String(20))  # which platform this was generated for
    status = Column(String(20), default="draft")  # draft, approved, scheduled, published, failed
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
||||
|
||||
|
||||
# === Phase 4: Affiliate Links ===
|
||||
|
||||
class AffiliateLink(Base):
    """A monetized link that can be injected into generated content."""

    __tablename__ = "affiliate_links"

    id = Column(Integer, primary_key=True, index=True)
    character_id = Column(Integer, ForeignKey("characters.id"), nullable=True)  # null = global
    network = Column(String(100), nullable=False)  # amazon, clickbank, etc.
    name = Column(String(200), nullable=False)
    url = Column(String(1000), nullable=False)
    tag = Column(String(100))
    topics = Column(JSON, default=list)  # relevant topics for auto-insertion
    is_active = Column(Boolean, default=True)
    click_count = Column(Integer, default=0)
    created_at = Column(DateTime, default=datetime.utcnow)
|
||||
|
||||
|
||||
# === Phase 5: Scheduling ===
|
||||
|
||||
class EditorialPlan(Base):
    """A recurring publishing schedule for one character."""

    __tablename__ = "editorial_plans"

    id = Column(Integer, primary_key=True, index=True)
    character_id = Column(Integer, ForeignKey("characters.id"), nullable=False)
    name = Column(String(200), nullable=False)
    frequency = Column(String(20), default="daily")  # daily, twice_daily, weekly, custom
    posts_per_day = Column(Integer, default=1)
    platforms = Column(JSON, default=list)      # ["facebook", "instagram", "youtube"]
    content_types = Column(JSON, default=list)  # ["text", "image", "video"]
    posting_times = Column(JSON, default=list)  # ["09:00", "18:00"]
    start_date = Column(DateTime)
    end_date = Column(DateTime, nullable=True)  # null = open-ended plan
    is_active = Column(Boolean, default=False)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
||||
|
||||
|
||||
class ScheduledPost(Base):
    """One concrete (post, platform, time) publication slot."""

    __tablename__ = "scheduled_posts"

    id = Column(Integer, primary_key=True, index=True)
    plan_id = Column(Integer, ForeignKey("editorial_plans.id"), nullable=True)  # null = ad-hoc schedule
    post_id = Column(Integer, ForeignKey("posts.id"), nullable=False)
    platform = Column(String(20), nullable=False)
    scheduled_at = Column(DateTime, nullable=False)
    published_at = Column(DateTime, nullable=True)
    status = Column(String(20), default="pending")  # pending, publishing, published, failed
    error_message = Column(Text, nullable=True)     # last publish failure, if any
    external_post_id = Column(String(200), nullable=True)  # platform-side post ID after publish
    created_at = Column(DateTime, default=datetime.utcnow)
|
||||
|
||||
|
||||
# === Phase 6-10: Social Accounts ===
|
||||
|
||||
class SocialAccount(Base):
    """Platform credentials and identifiers linked to a character."""

    __tablename__ = "social_accounts"

    id = Column(Integer, primary_key=True, index=True)
    character_id = Column(Integer, ForeignKey("characters.id"), nullable=False)
    platform = Column(String(20), nullable=False)  # facebook, instagram, youtube, tiktok
    account_name = Column(String(200))
    account_id = Column(String(200))
    access_token = Column(Text)                    # NOTE(review): stored unencrypted — confirm acceptable
    refresh_token = Column(Text, nullable=True)
    token_expires_at = Column(DateTime, nullable=True)
    page_id = Column(String(200), nullable=True)   # Facebook page ID
    extra_data = Column(JSON, default=dict)        # platform-specific data
    is_active = Column(Boolean, default=True)
    created_at = Column(DateTime, default=datetime.utcnow)
|
||||
|
||||
|
||||
# === Phase 11: Comment Management ===
|
||||
|
||||
class Comment(Base):
    """An external platform comment plus its AI-assisted reply workflow state."""

    __tablename__ = "comments"

    id = Column(Integer, primary_key=True, index=True)
    scheduled_post_id = Column(Integer, ForeignKey("scheduled_posts.id"), nullable=True)
    platform = Column(String(20), nullable=False)
    external_comment_id = Column(String(200))   # platform-side comment ID
    author_name = Column(String(200))
    author_id = Column(String(200))
    content = Column(Text)
    ai_suggested_reply = Column(Text, nullable=True)  # LLM draft awaiting review
    approved_reply = Column(Text, nullable=True)      # final text to send
    reply_status = Column(String(20), default="pending")  # pending, approved, replied, ignored
    replied_at = Column(DateTime, nullable=True)
    created_at = Column(DateTime, default=datetime.utcnow)
|
||||
|
||||
|
||||
# === System Settings ===
|
||||
|
||||
class SystemSetting(Base):
    """Key/value store for runtime configuration (LLM provider, API keys, ...)."""

    __tablename__ = "system_settings"

    id = Column(Integer, primary_key=True, index=True)
    key = Column(String(100), unique=True, nullable=False)
    value = Column(JSON)  # arbitrary JSON-serializable value
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
|
||||
0
backend/app/routers/__init__.py
Normal file
0
backend/app/routers/__init__.py
Normal file
75
backend/app/routers/affiliates.py
Normal file
75
backend/app/routers/affiliates.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Affiliate links CRUD router.
|
||||
|
||||
Manages affiliate links that can be injected into generated content.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..auth import get_current_user
|
||||
from ..database import get_db
|
||||
from ..models import AffiliateLink
|
||||
from ..schemas import AffiliateLinkCreate, AffiliateLinkResponse, AffiliateLinkUpdate
|
||||
|
||||
# Every affiliate endpoint requires an authenticated user (router-level dep).
router = APIRouter(
    prefix="/api/affiliates",
    tags=["affiliates"],
    dependencies=[Depends(get_current_user)],
)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[AffiliateLinkResponse])
def list_affiliate_links(
    character_id: int | None = Query(None),
    db: Session = Depends(get_db),
):
    """List affiliate links, newest first, optionally for one character."""
    q = db.query(AffiliateLink)
    if character_id is not None:
        q = q.filter(AffiliateLink.character_id == character_id)
    newest_first = q.order_by(AffiliateLink.created_at.desc())
    return newest_first.all()
|
||||
|
||||
|
||||
@router.get("/{link_id}", response_model=AffiliateLinkResponse)
def get_affiliate_link(link_id: int, db: Session = Depends(get_db)):
    """Fetch one affiliate link by ID, raising 404 when absent."""
    found = db.query(AffiliateLink).filter(AffiliateLink.id == link_id).first()
    if found is None:
        raise HTTPException(status_code=404, detail="Affiliate link not found")
    return found
|
||||
|
||||
|
||||
@router.post("/", response_model=AffiliateLinkResponse, status_code=201)
def create_affiliate_link(data: AffiliateLinkCreate, db: Session = Depends(get_db)):
    """Persist a new affiliate link from the request payload."""
    record = AffiliateLink(**data.model_dump())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
|
||||
|
||||
|
||||
@router.put("/{link_id}", response_model=AffiliateLinkResponse)
def update_affiliate_link(
    link_id: int, data: AffiliateLinkUpdate, db: Session = Depends(get_db)
):
    """Apply only the explicitly provided fields to an affiliate link."""
    record = db.query(AffiliateLink).filter(AffiliateLink.id == link_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Affiliate link not found")
    for field, new_value in data.model_dump(exclude_unset=True).items():
        setattr(record, field, new_value)
    db.commit()
    db.refresh(record)
    return record
|
||||
|
||||
|
||||
@router.delete("/{link_id}", status_code=204)
def delete_affiliate_link(link_id: int, db: Session = Depends(get_db)):
    """Remove an affiliate link; 404 when it does not exist."""
    record = db.query(AffiliateLink).filter(AffiliateLink.id == link_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Affiliate link not found")
    db.delete(record)
    db.commit()
|
||||
62
backend/app/routers/characters.py
Normal file
62
backend/app/routers/characters.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..auth import get_current_user
|
||||
from ..database import get_db
|
||||
from ..models import Character
|
||||
from ..schemas import CharacterCreate, CharacterResponse, CharacterUpdate
|
||||
|
||||
# Every character endpoint requires an authenticated user (router-level dep).
router = APIRouter(
    prefix="/api/characters",
    tags=["characters"],
    dependencies=[Depends(get_current_user)],
)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[CharacterResponse])
def list_characters(db: Session = Depends(get_db)):
    """Return every character, newest first."""
    ordered = db.query(Character).order_by(Character.created_at.desc())
    return ordered.all()
|
||||
|
||||
|
||||
@router.get("/{character_id}", response_model=CharacterResponse)
def get_character(character_id: int, db: Session = Depends(get_db)):
    """Fetch one character by ID, raising 404 when absent."""
    found = db.query(Character).filter(Character.id == character_id).first()
    if found is None:
        raise HTTPException(status_code=404, detail="Character not found")
    return found
|
||||
|
||||
|
||||
@router.post("/", response_model=CharacterResponse, status_code=201)
def create_character(data: CharacterCreate, db: Session = Depends(get_db)):
    """Persist a new character from the request payload."""
    record = Character(**data.model_dump())
    db.add(record)
    db.commit()
    db.refresh(record)
    return record
|
||||
|
||||
|
||||
@router.put("/{character_id}", response_model=CharacterResponse)
def update_character(
    character_id: int, data: CharacterUpdate, db: Session = Depends(get_db)
):
    """Apply provided fields to a character and bump its updated_at stamp."""
    record = db.query(Character).filter(Character.id == character_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Character not found")
    for field, new_value in data.model_dump(exclude_unset=True).items():
        setattr(record, field, new_value)
    record.updated_at = datetime.utcnow()
    db.commit()
    db.refresh(record)
    return record
|
||||
|
||||
|
||||
@router.delete("/{character_id}", status_code=204)
def delete_character(character_id: int, db: Session = Depends(get_db)):
    """Remove a character; 404 when it does not exist."""
    record = db.query(Character).filter(Character.id == character_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Character not found")
    db.delete(record)
    db.commit()
|
||||
281
backend/app/routers/comments.py
Normal file
281
backend/app/routers/comments.py
Normal file
@@ -0,0 +1,281 @@
|
||||
"""Comment management router.
|
||||
|
||||
Handles fetching, reviewing, and replying to comments on published posts.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..auth import get_current_user
|
||||
from ..database import get_db
|
||||
from ..models import Comment, Post, ScheduledPost, SocialAccount, SystemSetting
|
||||
from ..schemas import CommentAction, CommentResponse
|
||||
from ..services.llm import get_llm_provider
|
||||
from ..services.social import get_publisher
|
||||
|
||||
# Every comment endpoint requires an authenticated user (router-level dep).
router = APIRouter(
    prefix="/api/comments",
    tags=["comments"],
    dependencies=[Depends(get_current_user)],
)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[CommentResponse])
def list_comments(
    platform: str | None = Query(None),
    reply_status: str | None = Query(None),
    scheduled_post_id: int | None = Query(None),
    db: Session = Depends(get_db),
):
    """List comments newest first, with optional platform/status/post filters."""
    q = db.query(Comment)
    for column, wanted in (
        (Comment.platform, platform),
        (Comment.reply_status, reply_status),
        (Comment.scheduled_post_id, scheduled_post_id),
    ):
        if wanted is not None:
            q = q.filter(column == wanted)
    return q.order_by(Comment.created_at.desc()).all()
|
||||
|
||||
|
||||
@router.get("/pending", response_model=list[CommentResponse])
def list_pending_comments(db: Session = Depends(get_db)):
    """Return only comments still awaiting review (reply_status 'pending')."""
    pending = db.query(Comment).filter(Comment.reply_status == "pending")
    return pending.order_by(Comment.created_at.desc()).all()
|
||||
|
||||
|
||||
@router.get("/{comment_id}", response_model=CommentResponse)
def get_comment(comment_id: int, db: Session = Depends(get_db)):
    """Fetch one comment by ID, raising 404 when absent."""
    found = db.query(Comment).filter(Comment.id == comment_id).first()
    if found is None:
        raise HTTPException(status_code=404, detail="Comment not found")
    return found
|
||||
|
||||
|
||||
@router.post("/{comment_id}/action", response_model=CommentResponse)
def action_on_comment(
    comment_id: int, data: CommentAction, db: Session = Depends(get_db)
):
    """Take action on a comment: approve, edit, or ignore.

    - approve: promote the AI-suggested reply to the approved reply
    - edit: use the caller-supplied reply_text as the approved reply
    - ignore: mark the comment as ignored

    Raises 404 for unknown comments and 400 for invalid actions or
    missing prerequisites.
    """
    comment = db.query(Comment).filter(Comment.id == comment_id).first()
    if not comment:
        raise HTTPException(status_code=404, detail="Comment not found")

    if data.action == "approve":
        # Fix: approving with no AI suggestion previously produced an
        # "approved" comment whose reply was NULL — /reply then rejected it
        # anyway. Fail fast here with a clear message instead.
        if not comment.ai_suggested_reply:
            raise HTTPException(
                status_code=400,
                detail="No AI-suggested reply to approve; use the edit action",
            )
        comment.approved_reply = comment.ai_suggested_reply
        comment.reply_status = "approved"
    elif data.action == "edit":
        if not data.reply_text:
            raise HTTPException(status_code=400, detail="reply_text required for edit action")
        comment.approved_reply = data.reply_text
        comment.reply_status = "approved"
    elif data.action == "ignore":
        comment.reply_status = "ignored"
    else:
        raise HTTPException(status_code=400, detail=f"Unknown action '{data.action}'. Use: approve, edit, ignore")

    db.commit()
    db.refresh(comment)
    return comment
|
||||
|
||||
|
||||
@router.post("/{comment_id}/reply", response_model=CommentResponse)
def reply_to_comment(comment_id: int, db: Session = Depends(get_db)):
    """Send the approved reply via the social platform API.

    Walks comment -> scheduled post -> post -> social account to locate the
    credentials for the comment's platform, then asks the platform publisher
    to post the reply. Marks the comment replied on success. Raises 4xx for
    missing prerequisites and 502 when the platform call fails.
    """
    comment = db.query(Comment).filter(Comment.id == comment_id).first()
    if not comment:
        raise HTTPException(status_code=404, detail="Comment not found")

    if not comment.approved_reply:
        raise HTTPException(status_code=400, detail="No approved reply to send. Use /action first.")

    if not comment.external_comment_id:
        raise HTTPException(status_code=400, detail="No external comment ID available for reply")

    # Find the social account for this platform via the scheduled post
    if not comment.scheduled_post_id:
        raise HTTPException(status_code=400, detail="Comment is not linked to a scheduled post")

    scheduled = (
        db.query(ScheduledPost)
        .filter(ScheduledPost.id == comment.scheduled_post_id)
        .first()
    )
    if not scheduled:
        raise HTTPException(status_code=404, detail="Associated scheduled post not found")

    post = db.query(Post).filter(Post.id == scheduled.post_id).first()
    if not post:
        raise HTTPException(status_code=404, detail="Associated post not found")

    account = (
        db.query(SocialAccount)
        .filter(
            SocialAccount.character_id == post.character_id,
            SocialAccount.platform == comment.platform,
            SocialAccount.is_active == True,
        )
        .first()
    )
    if not account:
        raise HTTPException(
            status_code=400,
            detail=f"No active {comment.platform} account found for this character",
        )

    # Build publisher kwargs — platform-specific identifiers the publisher needs
    kwargs: dict = {}
    if account.platform == "facebook":
        kwargs["page_id"] = account.page_id
    elif account.platform == "instagram":
        kwargs["ig_user_id"] = account.account_id or (account.extra_data or {}).get("ig_user_id")

    try:
        publisher = get_publisher(account.platform, account.access_token, **kwargs)
        success = publisher.reply_to_comment(comment.external_comment_id, comment.approved_reply)

        if not success:
            # Normalize "publisher returned False" into the same 502 path
            # as publisher-raised errors below.
            raise RuntimeError("Platform returned failure for reply")

        comment.reply_status = "replied"
        comment.replied_at = datetime.utcnow()
        db.commit()
        db.refresh(comment)
        return comment

    except (RuntimeError, ValueError) as e:
        raise HTTPException(status_code=502, detail=f"Failed to send reply: {e}")
|
||||
|
||||
|
||||
@router.post("/fetch/{platform}")
def fetch_comments(platform: str, db: Session = Depends(get_db)):
    """Fetch new comments from a platform for all published posts.

    Creates Comment records for any new comments not already in the database.
    Uses LLM to generate AI-suggested replies for each new comment. Best-effort:
    posts whose account is missing or whose API call fails are silently skipped.
    """
    # Get all published scheduled posts for this platform
    published_posts = (
        db.query(ScheduledPost)
        .filter(
            ScheduledPost.platform == platform,
            ScheduledPost.status == "published",
            ScheduledPost.external_post_id != None,
        )
        .all()
    )

    if not published_posts:
        return {"new_comments": 0, "message": f"No published posts found for {platform}"}

    # Get LLM settings for AI reply generation from the key/value store
    llm_provider_name = None
    llm_api_key = None
    llm_model = None
    for key in ("llm_provider", "llm_api_key", "llm_model"):
        setting = db.query(SystemSetting).filter(SystemSetting.key == key).first()
        if setting:
            if key == "llm_provider":
                llm_provider_name = setting.value
            elif key == "llm_api_key":
                llm_api_key = setting.value
            elif key == "llm_model":
                llm_model = setting.value

    llm = None
    if llm_provider_name and llm_api_key:
        try:
            llm = get_llm_provider(llm_provider_name, llm_api_key, llm_model)
        except ValueError:
            pass  # LLM not available, skip AI replies

    new_comment_count = 0

    for scheduled in published_posts:
        # Get the post to find the character
        post = db.query(Post).filter(Post.id == scheduled.post_id).first()
        if not post:
            continue

        # Find the social account holding credentials for this platform
        account = (
            db.query(SocialAccount)
            .filter(
                SocialAccount.character_id == post.character_id,
                SocialAccount.platform == platform,
                SocialAccount.is_active == True,
            )
            .first()
        )
        if not account or not account.access_token:
            continue

        # Build publisher kwargs — platform-specific identifiers
        kwargs: dict = {}
        if account.platform == "facebook":
            kwargs["page_id"] = account.page_id
        elif account.platform == "instagram":
            kwargs["ig_user_id"] = account.account_id or (account.extra_data or {}).get("ig_user_id")

        try:
            publisher = get_publisher(account.platform, account.access_token, **kwargs)
            comments = publisher.get_comments(scheduled.external_post_id)
        except (RuntimeError, ValueError):
            continue  # Skip this post if API call fails

        for ext_comment in comments:
            ext_id = ext_comment.get("id", "")
            if not ext_id:
                continue

            # Check if comment already exists (dedupe on the platform comment ID)
            existing = (
                db.query(Comment)
                .filter(Comment.external_comment_id == ext_id)
                .first()
            )
            if existing:
                continue

            # Generate AI suggested reply if LLM is available
            ai_reply = None
            if llm:
                try:
                    system_prompt = (
                        f"You are managing social media comments for a content creator. "
                        f"Write a friendly, on-brand reply to this comment. "
                        f"Keep it concise (1-2 sentences). Be authentic and engaging."
                    )
                    prompt = (
                        f"Comment by {ext_comment.get('author', 'someone')}: "
                        f"\"{ext_comment.get('text', '')}\"\n\n"
                        f"Write a reply:"
                    )
                    ai_reply = llm.generate(prompt, system=system_prompt)
                except RuntimeError:
                    pass  # Skip AI reply if generation fails

            # Create comment record
            comment = Comment(
                scheduled_post_id=scheduled.id,
                platform=platform,
                external_comment_id=ext_id,
                author_name=ext_comment.get("author", "Unknown"),
                # NOTE(review): this stores the *comment* id as author_id,
                # duplicating external_comment_id above — presumably it should
                # read an author-identifier key from the publisher payload;
                # confirm against the publisher's get_comments schema.
                author_id=ext_comment.get("id", ""),
                content=ext_comment.get("text", ""),
                ai_suggested_reply=ai_reply,
                reply_status="pending",
            )
            db.add(comment)
            new_comment_count += 1

    db.commit()
    return {"new_comments": new_comment_count, "message": f"Fetched {new_comment_count} new comments from {platform}"}
|
||||
225
backend/app/routers/content.py
Normal file
225
backend/app/routers/content.py
Normal file
@@ -0,0 +1,225 @@
|
||||
"""Content generation router.
|
||||
|
||||
Handles post generation via LLM, image generation, and CRUD operations on posts.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..auth import get_current_user
|
||||
from ..database import get_db
|
||||
from ..models import AffiliateLink, Character, Post, SystemSetting
|
||||
from ..schemas import (
|
||||
GenerateContentRequest,
|
||||
GenerateImageRequest,
|
||||
PostResponse,
|
||||
PostUpdate,
|
||||
)
|
||||
from ..services.content import generate_hashtags, generate_post_text, inject_affiliate_links
|
||||
from ..services.images import get_image_provider
|
||||
from ..services.llm import get_llm_provider
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/content",
|
||||
tags=["content"],
|
||||
dependencies=[Depends(get_current_user)],
|
||||
)
|
||||
|
||||
|
||||
def _get_setting(db: Session, key: str) -> str | None:
    """Return the value of the system setting *key*, or None if unset."""
    row = db.query(SystemSetting).filter(SystemSetting.key == key).first()
    return row.value if row is not None else None
|
||||
|
||||
|
||||
@router.post("/generate", response_model=PostResponse)
|
||||
def generate_content(request: GenerateContentRequest, db: Session = Depends(get_db)):
|
||||
"""Generate content for a character using LLM."""
|
||||
# Validate character exists
|
||||
character = db.query(Character).filter(Character.id == request.character_id).first()
|
||||
if not character:
|
||||
raise HTTPException(status_code=404, detail="Character not found")
|
||||
|
||||
# Get LLM settings
|
||||
provider_name = request.provider or _get_setting(db, "llm_provider")
|
||||
api_key = _get_setting(db, "llm_api_key")
|
||||
model = request.model or _get_setting(db, "llm_model")
|
||||
|
||||
if not provider_name:
|
||||
raise HTTPException(status_code=400, detail="LLM provider not configured. Set 'llm_provider' in settings.")
|
||||
if not api_key:
|
||||
raise HTTPException(status_code=400, detail="LLM API key not configured. Set 'llm_api_key' in settings.")
|
||||
|
||||
# Build character dict for content service
|
||||
char_dict = {
|
||||
"name": character.name,
|
||||
"niche": character.niche,
|
||||
"topics": character.topics or [],
|
||||
"tone": character.tone or "professional",
|
||||
}
|
||||
|
||||
# Create LLM provider and generate text
|
||||
llm = get_llm_provider(provider_name, api_key, model)
|
||||
text = generate_post_text(
|
||||
character=char_dict,
|
||||
llm_provider=llm,
|
||||
platform=request.platform,
|
||||
topic_hint=request.topic_hint,
|
||||
)
|
||||
|
||||
# Generate hashtags
|
||||
hashtags = generate_hashtags(text, llm, request.platform)
|
||||
|
||||
# Handle affiliate links
|
||||
affiliate_links_used: list[dict] = []
|
||||
if request.include_affiliates:
|
||||
links = (
|
||||
db.query(AffiliateLink)
|
||||
.filter(
|
||||
AffiliateLink.is_active == True,
|
||||
(AffiliateLink.character_id == character.id) | (AffiliateLink.character_id == None),
|
||||
)
|
||||
.all()
|
||||
)
|
||||
if links:
|
||||
link_dicts = [
|
||||
{
|
||||
"url": link.url,
|
||||
"label": link.name,
|
||||
"keywords": link.topics or [],
|
||||
}
|
||||
for link in links
|
||||
]
|
||||
text, affiliate_links_used = inject_affiliate_links(
|
||||
text, link_dicts, character.topics or []
|
||||
)
|
||||
|
||||
# Create post record
|
||||
post = Post(
|
||||
character_id=character.id,
|
||||
content_type=request.content_type,
|
||||
text_content=text,
|
||||
hashtags=hashtags,
|
||||
affiliate_links_used=affiliate_links_used,
|
||||
llm_provider=provider_name,
|
||||
llm_model=model,
|
||||
platform_hint=request.platform,
|
||||
status="draft",
|
||||
)
|
||||
db.add(post)
|
||||
db.commit()
|
||||
db.refresh(post)
|
||||
return post
|
||||
|
||||
|
||||
@router.post("/generate-image", response_model=PostResponse)
|
||||
def generate_image(request: GenerateImageRequest, db: Session = Depends(get_db)):
|
||||
"""Generate an image for a character and attach to a post."""
|
||||
# Validate character exists
|
||||
character = db.query(Character).filter(Character.id == request.character_id).first()
|
||||
if not character:
|
||||
raise HTTPException(status_code=404, detail="Character not found")
|
||||
|
||||
# Get image settings
|
||||
provider_name = request.provider or _get_setting(db, "image_provider")
|
||||
api_key = _get_setting(db, "image_api_key")
|
||||
|
||||
if not provider_name:
|
||||
raise HTTPException(status_code=400, detail="Image provider not configured. Set 'image_provider' in settings.")
|
||||
if not api_key:
|
||||
raise HTTPException(status_code=400, detail="Image API key not configured. Set 'image_api_key' in settings.")
|
||||
|
||||
# Build prompt from character if not provided
|
||||
prompt = request.prompt
|
||||
if not prompt:
|
||||
style_hint = request.style_hint or ""
|
||||
visual_style = character.visual_style or {}
|
||||
style_desc = visual_style.get("description", "")
|
||||
prompt = (
|
||||
f"Create a social media image for {character.name}, "
|
||||
f"a content creator in the {character.niche} niche. "
|
||||
f"Style: {style_desc} {style_hint}".strip()
|
||||
)
|
||||
|
||||
# Generate image
|
||||
image_provider = get_image_provider(provider_name, api_key)
|
||||
image_url = image_provider.generate(prompt, size=request.size)
|
||||
|
||||
# Create a new post with the image
|
||||
post = Post(
|
||||
character_id=character.id,
|
||||
content_type="image",
|
||||
image_url=image_url,
|
||||
platform_hint="instagram",
|
||||
status="draft",
|
||||
)
|
||||
db.add(post)
|
||||
db.commit()
|
||||
db.refresh(post)
|
||||
return post
|
||||
|
||||
|
||||
@router.get("/posts", response_model=list[PostResponse])
|
||||
def list_posts(
|
||||
character_id: int | None = Query(None),
|
||||
status: str | None = Query(None),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""List all posts with optional filters."""
|
||||
query = db.query(Post)
|
||||
if character_id is not None:
|
||||
query = query.filter(Post.character_id == character_id)
|
||||
if status is not None:
|
||||
query = query.filter(Post.status == status)
|
||||
return query.order_by(Post.created_at.desc()).all()
|
||||
|
||||
|
||||
@router.get("/posts/{post_id}", response_model=PostResponse)
|
||||
def get_post(post_id: int, db: Session = Depends(get_db)):
|
||||
"""Get a single post by ID."""
|
||||
post = db.query(Post).filter(Post.id == post_id).first()
|
||||
if not post:
|
||||
raise HTTPException(status_code=404, detail="Post not found")
|
||||
return post
|
||||
|
||||
|
||||
@router.put("/posts/{post_id}", response_model=PostResponse)
|
||||
def update_post(post_id: int, data: PostUpdate, db: Session = Depends(get_db)):
|
||||
"""Update a post."""
|
||||
post = db.query(Post).filter(Post.id == post_id).first()
|
||||
if not post:
|
||||
raise HTTPException(status_code=404, detail="Post not found")
|
||||
update_data = data.model_dump(exclude_unset=True)
|
||||
for key, value in update_data.items():
|
||||
setattr(post, key, value)
|
||||
post.updated_at = datetime.utcnow()
|
||||
db.commit()
|
||||
db.refresh(post)
|
||||
return post
|
||||
|
||||
|
||||
@router.delete("/posts/{post_id}", status_code=204)
|
||||
def delete_post(post_id: int, db: Session = Depends(get_db)):
|
||||
"""Delete a post."""
|
||||
post = db.query(Post).filter(Post.id == post_id).first()
|
||||
if not post:
|
||||
raise HTTPException(status_code=404, detail="Post not found")
|
||||
db.delete(post)
|
||||
db.commit()
|
||||
|
||||
|
||||
@router.post("/posts/{post_id}/approve", response_model=PostResponse)
|
||||
def approve_post(post_id: int, db: Session = Depends(get_db)):
|
||||
"""Approve a post (set status to 'approved')."""
|
||||
post = db.query(Post).filter(Post.id == post_id).first()
|
||||
if not post:
|
||||
raise HTTPException(status_code=404, detail="Post not found")
|
||||
post.status = "approved"
|
||||
post.updated_at = datetime.utcnow()
|
||||
db.commit()
|
||||
db.refresh(post)
|
||||
return post
|
||||
112
backend/app/routers/editorial.py
Normal file
112
backend/app/routers/editorial.py
Normal file
@@ -0,0 +1,112 @@
|
||||
"""Editorial Calendar router.
|
||||
|
||||
Espone endpoint per il calendario editoriale con awareness levels e formati narrativi.
|
||||
"""
|
||||
|
||||
import csv
|
||||
import io
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import StreamingResponse
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..auth import get_current_user
|
||||
from ..database import get_db
|
||||
from ..services.calendar_service import CalendarService, FORMATI_NARRATIVI, AWARENESS_LEVELS
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/editorial",
|
||||
tags=["editorial"],
|
||||
dependencies=[Depends(get_current_user)],
|
||||
)
|
||||
|
||||
_calendar_service = CalendarService()
|
||||
|
||||
|
||||
# === Schemas locali ===
|
||||
|
||||
class CalendarGenerateRequest(BaseModel):
    """Payload for generating an editorial calendar."""

    topics: list[str]  # topics to distribute across the calendar slots
    format_narrativo: Optional[str] = None  # narrative format (PAS, AIDA, ...)
    awareness_level: Optional[int] = None  # pin a single awareness level, if set
    num_posts: int = 7  # number of slots to generate
    start_date: Optional[str] = None  # ISO date of the first slot
    character_id: Optional[int] = None  # optional character association
|
||||
|
||||
|
||||
class ExportCsvRequest(BaseModel):
    """Payload for exporting calendar slots to CSV."""

    slots: list[dict]  # slot dicts as produced by generate_calendar
|
||||
|
||||
|
||||
# === Endpoints ===
|
||||
|
||||
@router.get("/formats")
|
||||
def get_formats():
|
||||
"""Restituisce i format narrativi disponibili con i relativi awareness levels."""
|
||||
return {
|
||||
"formats": _calendar_service.get_formats(),
|
||||
"awareness_levels": [
|
||||
{"value": k, "label": v}
|
||||
for k, v in AWARENESS_LEVELS.items()
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@router.post("/generate-calendar")
|
||||
def generate_calendar(request: CalendarGenerateRequest, db: Session = Depends(get_db)):
|
||||
"""Genera un calendario editoriale con awareness levels."""
|
||||
if not request.topics:
|
||||
return {"slots": [], "totale_post": 0}
|
||||
|
||||
slots = _calendar_service.generate_calendar(
|
||||
topics=request.topics,
|
||||
num_posts=request.num_posts,
|
||||
format_narrativo=request.format_narrativo,
|
||||
awareness_level=request.awareness_level,
|
||||
start_date=request.start_date,
|
||||
)
|
||||
|
||||
return {
|
||||
"slots": slots,
|
||||
"totale_post": len(slots),
|
||||
}
|
||||
|
||||
|
||||
@router.post("/export-csv")
|
||||
def export_csv(request: ExportCsvRequest):
|
||||
"""Esporta il calendario editoriale come CSV per Canva."""
|
||||
output = io.StringIO()
|
||||
fieldnames = [
|
||||
"indice",
|
||||
"data_pubblicazione",
|
||||
"topic",
|
||||
"formato_narrativo",
|
||||
"awareness_level",
|
||||
"awareness_label",
|
||||
"note",
|
||||
]
|
||||
|
||||
writer = csv.DictWriter(output, fieldnames=fieldnames, extrasaction="ignore")
|
||||
writer.writeheader()
|
||||
|
||||
for slot in request.slots:
|
||||
writer.writerow({
|
||||
"indice": slot.get("indice", ""),
|
||||
"data_pubblicazione": slot.get("data_pubblicazione", ""),
|
||||
"topic": slot.get("topic", ""),
|
||||
"formato_narrativo": slot.get("formato_narrativo", ""),
|
||||
"awareness_level": slot.get("awareness_level", ""),
|
||||
"awareness_label": slot.get("awareness_label", ""),
|
||||
"note": slot.get("note", ""),
|
||||
})
|
||||
|
||||
output.seek(0)
|
||||
return StreamingResponse(
|
||||
iter([output.getvalue()]),
|
||||
media_type="text/csv",
|
||||
headers={
|
||||
"Content-Disposition": "attachment; filename=calendario_editoriale.csv"
|
||||
},
|
||||
)
|
||||
150
backend/app/routers/plans.py
Normal file
150
backend/app/routers/plans.py
Normal file
@@ -0,0 +1,150 @@
|
||||
"""Editorial plans and scheduled posts router.
|
||||
|
||||
Manages editorial plans (posting schedules) and individual scheduled post entries.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..auth import get_current_user
|
||||
from ..database import get_db
|
||||
from ..models import EditorialPlan, ScheduledPost
|
||||
from ..schemas import (
|
||||
EditorialPlanCreate,
|
||||
EditorialPlanResponse,
|
||||
EditorialPlanUpdate,
|
||||
ScheduledPostCreate,
|
||||
ScheduledPostResponse,
|
||||
)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/plans",
|
||||
tags=["plans"],
|
||||
dependencies=[Depends(get_current_user)],
|
||||
)
|
||||
|
||||
|
||||
# === Editorial Plans ===
|
||||
|
||||
|
||||
@router.get("/", response_model=list[EditorialPlanResponse])
|
||||
def list_plans(
|
||||
character_id: int | None = Query(None),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""List all editorial plans, optionally filtered by character."""
|
||||
query = db.query(EditorialPlan)
|
||||
if character_id is not None:
|
||||
query = query.filter(EditorialPlan.character_id == character_id)
|
||||
return query.order_by(EditorialPlan.created_at.desc()).all()
|
||||
|
||||
|
||||
@router.get("/scheduled", response_model=list[ScheduledPostResponse])
|
||||
def list_all_scheduled_posts(
|
||||
platform: str | None = Query(None),
|
||||
status: str | None = Query(None),
|
||||
date_from: datetime | None = Query(None),
|
||||
date_after: datetime | None = Query(None),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""Get all scheduled posts across all plans with optional filters."""
|
||||
query = db.query(ScheduledPost)
|
||||
if platform is not None:
|
||||
query = query.filter(ScheduledPost.platform == platform)
|
||||
if status is not None:
|
||||
query = query.filter(ScheduledPost.status == status)
|
||||
if date_from is not None:
|
||||
query = query.filter(ScheduledPost.scheduled_at >= date_from)
|
||||
if date_after is not None:
|
||||
query = query.filter(ScheduledPost.scheduled_at <= date_after)
|
||||
return query.order_by(ScheduledPost.scheduled_at).all()
|
||||
|
||||
|
||||
@router.get("/{plan_id}", response_model=EditorialPlanResponse)
|
||||
def get_plan(plan_id: int, db: Session = Depends(get_db)):
|
||||
"""Get a single editorial plan by ID."""
|
||||
plan = db.query(EditorialPlan).filter(EditorialPlan.id == plan_id).first()
|
||||
if not plan:
|
||||
raise HTTPException(status_code=404, detail="Editorial plan not found")
|
||||
return plan
|
||||
|
||||
|
||||
@router.post("/", response_model=EditorialPlanResponse, status_code=201)
|
||||
def create_plan(data: EditorialPlanCreate, db: Session = Depends(get_db)):
|
||||
"""Create a new editorial plan."""
|
||||
plan = EditorialPlan(**data.model_dump())
|
||||
db.add(plan)
|
||||
db.commit()
|
||||
db.refresh(plan)
|
||||
return plan
|
||||
|
||||
|
||||
@router.put("/{plan_id}", response_model=EditorialPlanResponse)
|
||||
def update_plan(
|
||||
plan_id: int, data: EditorialPlanUpdate, db: Session = Depends(get_db)
|
||||
):
|
||||
"""Update an editorial plan."""
|
||||
plan = db.query(EditorialPlan).filter(EditorialPlan.id == plan_id).first()
|
||||
if not plan:
|
||||
raise HTTPException(status_code=404, detail="Editorial plan not found")
|
||||
update_data = data.model_dump(exclude_unset=True)
|
||||
for key, value in update_data.items():
|
||||
setattr(plan, key, value)
|
||||
plan.updated_at = datetime.utcnow()
|
||||
db.commit()
|
||||
db.refresh(plan)
|
||||
return plan
|
||||
|
||||
|
||||
@router.delete("/{plan_id}", status_code=204)
|
||||
def delete_plan(plan_id: int, db: Session = Depends(get_db)):
|
||||
"""Delete an editorial plan and its associated scheduled posts."""
|
||||
plan = db.query(EditorialPlan).filter(EditorialPlan.id == plan_id).first()
|
||||
if not plan:
|
||||
raise HTTPException(status_code=404, detail="Editorial plan not found")
|
||||
# Delete associated scheduled posts first
|
||||
db.query(ScheduledPost).filter(ScheduledPost.plan_id == plan_id).delete()
|
||||
db.delete(plan)
|
||||
db.commit()
|
||||
|
||||
|
||||
@router.post("/{plan_id}/toggle", response_model=EditorialPlanResponse)
|
||||
def toggle_plan(plan_id: int, db: Session = Depends(get_db)):
|
||||
"""Toggle the is_active status of an editorial plan."""
|
||||
plan = db.query(EditorialPlan).filter(EditorialPlan.id == plan_id).first()
|
||||
if not plan:
|
||||
raise HTTPException(status_code=404, detail="Editorial plan not found")
|
||||
plan.is_active = not plan.is_active
|
||||
plan.updated_at = datetime.utcnow()
|
||||
db.commit()
|
||||
db.refresh(plan)
|
||||
return plan
|
||||
|
||||
|
||||
# === Scheduled Posts ===
|
||||
|
||||
|
||||
@router.get("/{plan_id}/schedule", response_model=list[ScheduledPostResponse])
|
||||
def get_plan_scheduled_posts(plan_id: int, db: Session = Depends(get_db)):
|
||||
"""Get all scheduled posts for a specific plan."""
|
||||
plan = db.query(EditorialPlan).filter(EditorialPlan.id == plan_id).first()
|
||||
if not plan:
|
||||
raise HTTPException(status_code=404, detail="Editorial plan not found")
|
||||
return (
|
||||
db.query(ScheduledPost)
|
||||
.filter(ScheduledPost.plan_id == plan_id)
|
||||
.order_by(ScheduledPost.scheduled_at)
|
||||
.all()
|
||||
)
|
||||
|
||||
|
||||
@router.post("/schedule", response_model=ScheduledPostResponse, status_code=201)
|
||||
def schedule_post(data: ScheduledPostCreate, db: Session = Depends(get_db)):
|
||||
"""Manually schedule a post."""
|
||||
scheduled = ScheduledPost(**data.model_dump())
|
||||
db.add(scheduled)
|
||||
db.commit()
|
||||
db.refresh(scheduled)
|
||||
return scheduled
|
||||
122
backend/app/routers/settings.py
Normal file
122
backend/app/routers/settings.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""System settings router.
|
||||
|
||||
Manages key-value system settings including API provider configuration.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..auth import get_current_user
|
||||
from ..database import get_db
|
||||
from ..models import SystemSetting
|
||||
from ..schemas import SettingResponse, SettingUpdate
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/settings",
|
||||
tags=["settings"],
|
||||
dependencies=[Depends(get_current_user)],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/", response_model=list[SettingResponse])
|
||||
def list_settings(db: Session = Depends(get_db)):
|
||||
"""Get all system settings."""
|
||||
settings = db.query(SystemSetting).order_by(SystemSetting.key).all()
|
||||
return settings
|
||||
|
||||
|
||||
@router.get("/providers/status")
|
||||
def get_providers_status(db: Session = Depends(get_db)):
|
||||
"""Check which API providers are configured (have API keys set).
|
||||
|
||||
Returns a dict indicating configuration status for each provider category.
|
||||
"""
|
||||
# Helper to check if a setting exists and has a truthy value
|
||||
def _has_setting(key: str) -> str | None:
|
||||
setting = db.query(SystemSetting).filter(SystemSetting.key == key).first()
|
||||
if setting and setting.value:
|
||||
return setting.value if isinstance(setting.value, str) else str(setting.value)
|
||||
return None
|
||||
|
||||
# LLM provider
|
||||
llm_provider = _has_setting("llm_provider")
|
||||
llm_key = _has_setting("llm_api_key")
|
||||
|
||||
# Image provider
|
||||
image_provider = _has_setting("image_provider")
|
||||
image_key = _has_setting("image_api_key")
|
||||
|
||||
# Voice provider (future)
|
||||
voice_provider = _has_setting("voice_provider")
|
||||
voice_key = _has_setting("voice_api_key")
|
||||
|
||||
# Social platforms - check for any active social accounts
|
||||
from ..models import SocialAccount
|
||||
|
||||
social_platforms = {}
|
||||
for platform in ("facebook", "instagram", "youtube", "tiktok"):
|
||||
has_account = (
|
||||
db.query(SocialAccount)
|
||||
.filter(
|
||||
SocialAccount.platform == platform,
|
||||
SocialAccount.is_active == True,
|
||||
SocialAccount.access_token != None,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
social_platforms[platform] = has_account is not None
|
||||
|
||||
return {
|
||||
"llm": {
|
||||
"configured": bool(llm_provider and llm_key),
|
||||
"provider": llm_provider,
|
||||
},
|
||||
"image": {
|
||||
"configured": bool(image_provider and image_key),
|
||||
"provider": image_provider,
|
||||
},
|
||||
"voice": {
|
||||
"configured": bool(voice_provider and voice_key),
|
||||
"provider": voice_provider,
|
||||
},
|
||||
"social": social_platforms,
|
||||
}
|
||||
|
||||
|
||||
@router.get("/{key}", response_model=SettingResponse)
|
||||
def get_setting(key: str, db: Session = Depends(get_db)):
|
||||
"""Get a single setting by key."""
|
||||
setting = db.query(SystemSetting).filter(SystemSetting.key == key).first()
|
||||
if not setting:
|
||||
raise HTTPException(status_code=404, detail=f"Setting '{key}' not found")
|
||||
return setting
|
||||
|
||||
|
||||
@router.put("/{key}", response_model=SettingResponse)
|
||||
def upsert_setting(key: str, data: SettingUpdate, db: Session = Depends(get_db)):
|
||||
"""Create or update a setting by key.
|
||||
|
||||
If the setting exists, update its value. If not, create it.
|
||||
"""
|
||||
setting = db.query(SystemSetting).filter(SystemSetting.key == key).first()
|
||||
if setting:
|
||||
setting.value = data.value
|
||||
setting.updated_at = datetime.utcnow()
|
||||
else:
|
||||
setting = SystemSetting(key=key, value=data.value)
|
||||
db.add(setting)
|
||||
db.commit()
|
||||
db.refresh(setting)
|
||||
return setting
|
||||
|
||||
|
||||
@router.delete("/{key}", status_code=204)
|
||||
def delete_setting(key: str, db: Session = Depends(get_db)):
|
||||
"""Delete a setting by key."""
|
||||
setting = db.query(SystemSetting).filter(SystemSetting.key == key).first()
|
||||
if not setting:
|
||||
raise HTTPException(status_code=404, detail=f"Setting '{key}' not found")
|
||||
db.delete(setting)
|
||||
db.commit()
|
||||
203
backend/app/routers/social.py
Normal file
203
backend/app/routers/social.py
Normal file
@@ -0,0 +1,203 @@
|
||||
"""Social account management and publishing router.
|
||||
|
||||
Handles CRUD for social media accounts and manual publishing of scheduled posts.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ..auth import get_current_user
|
||||
from ..database import get_db
|
||||
from ..models import Post, ScheduledPost, SocialAccount
|
||||
from ..schemas import (
|
||||
ScheduledPostResponse,
|
||||
SocialAccountCreate,
|
||||
SocialAccountResponse,
|
||||
SocialAccountUpdate,
|
||||
)
|
||||
from ..services.social import get_publisher
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/api/social",
|
||||
tags=["social"],
|
||||
dependencies=[Depends(get_current_user)],
|
||||
)
|
||||
|
||||
|
||||
# === Social Accounts ===
|
||||
|
||||
|
||||
@router.get("/accounts", response_model=list[SocialAccountResponse])
|
||||
def list_social_accounts(
|
||||
character_id: int | None = Query(None),
|
||||
db: Session = Depends(get_db),
|
||||
):
|
||||
"""List all social accounts, optionally filtered by character."""
|
||||
query = db.query(SocialAccount)
|
||||
if character_id is not None:
|
||||
query = query.filter(SocialAccount.character_id == character_id)
|
||||
return query.order_by(SocialAccount.created_at.desc()).all()
|
||||
|
||||
|
||||
@router.get("/accounts/{account_id}", response_model=SocialAccountResponse)
|
||||
def get_social_account(account_id: int, db: Session = Depends(get_db)):
|
||||
"""Get a single social account by ID."""
|
||||
account = db.query(SocialAccount).filter(SocialAccount.id == account_id).first()
|
||||
if not account:
|
||||
raise HTTPException(status_code=404, detail="Social account not found")
|
||||
return account
|
||||
|
||||
|
||||
@router.post("/accounts", response_model=SocialAccountResponse, status_code=201)
|
||||
def create_social_account(data: SocialAccountCreate, db: Session = Depends(get_db)):
|
||||
"""Create/connect a new social account."""
|
||||
account = SocialAccount(**data.model_dump())
|
||||
db.add(account)
|
||||
db.commit()
|
||||
db.refresh(account)
|
||||
return account
|
||||
|
||||
|
||||
@router.put("/accounts/{account_id}", response_model=SocialAccountResponse)
|
||||
def update_social_account(
|
||||
account_id: int, data: SocialAccountUpdate, db: Session = Depends(get_db)
|
||||
):
|
||||
"""Update a social account."""
|
||||
account = db.query(SocialAccount).filter(SocialAccount.id == account_id).first()
|
||||
if not account:
|
||||
raise HTTPException(status_code=404, detail="Social account not found")
|
||||
update_data = data.model_dump(exclude_unset=True)
|
||||
for key, value in update_data.items():
|
||||
setattr(account, key, value)
|
||||
db.commit()
|
||||
db.refresh(account)
|
||||
return account
|
||||
|
||||
|
||||
@router.delete("/accounts/{account_id}", status_code=204)
|
||||
def delete_social_account(account_id: int, db: Session = Depends(get_db)):
|
||||
"""Delete a social account."""
|
||||
account = db.query(SocialAccount).filter(SocialAccount.id == account_id).first()
|
||||
if not account:
|
||||
raise HTTPException(status_code=404, detail="Social account not found")
|
||||
db.delete(account)
|
||||
db.commit()
|
||||
|
||||
|
||||
@router.post("/accounts/{account_id}/test")
|
||||
def test_social_account(account_id: int, db: Session = Depends(get_db)):
|
||||
"""Test connection to a social account by making a simple API call."""
|
||||
account = db.query(SocialAccount).filter(SocialAccount.id == account_id).first()
|
||||
if not account:
|
||||
raise HTTPException(status_code=404, detail="Social account not found")
|
||||
|
||||
if not account.access_token:
|
||||
raise HTTPException(status_code=400, detail="No access token configured for this account")
|
||||
|
||||
try:
|
||||
# Build kwargs based on platform
|
||||
kwargs: dict = {}
|
||||
if account.platform == "facebook":
|
||||
if not account.page_id:
|
||||
raise HTTPException(status_code=400, detail="Facebook account requires page_id")
|
||||
kwargs["page_id"] = account.page_id
|
||||
elif account.platform == "instagram":
|
||||
ig_user_id = account.account_id or (account.extra_data or {}).get("ig_user_id")
|
||||
if not ig_user_id:
|
||||
raise HTTPException(status_code=400, detail="Instagram account requires ig_user_id")
|
||||
kwargs["ig_user_id"] = ig_user_id
|
||||
|
||||
# Try to instantiate the publisher (validates credentials format)
|
||||
get_publisher(account.platform, account.access_token, **kwargs)
|
||||
return {"status": "ok", "message": f"Connection to {account.platform} account is configured correctly"}
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
except RuntimeError as e:
|
||||
raise HTTPException(status_code=502, detail=f"Connection test failed: {e}")
|
||||
|
||||
|
||||
# === Publishing ===
|
||||
|
||||
|
||||
@router.post("/publish/{scheduled_post_id}", response_model=ScheduledPostResponse)
|
||||
def publish_scheduled_post(scheduled_post_id: int, db: Session = Depends(get_db)):
|
||||
"""Manually trigger publishing of a scheduled post."""
|
||||
scheduled = (
|
||||
db.query(ScheduledPost)
|
||||
.filter(ScheduledPost.id == scheduled_post_id)
|
||||
.first()
|
||||
)
|
||||
if not scheduled:
|
||||
raise HTTPException(status_code=404, detail="Scheduled post not found")
|
||||
|
||||
# Get the post content
|
||||
post = db.query(Post).filter(Post.id == scheduled.post_id).first()
|
||||
if not post:
|
||||
raise HTTPException(status_code=404, detail="Associated post not found")
|
||||
|
||||
# Find the social account for this platform and character
|
||||
account = (
|
||||
db.query(SocialAccount)
|
||||
.filter(
|
||||
SocialAccount.character_id == post.character_id,
|
||||
SocialAccount.platform == scheduled.platform,
|
||||
SocialAccount.is_active == True,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if not account:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"No active {scheduled.platform} account found for character {post.character_id}",
|
||||
)
|
||||
|
||||
if not account.access_token:
|
||||
raise HTTPException(status_code=400, detail="Social account has no access token configured")
|
||||
|
||||
# Build publisher kwargs
|
||||
kwargs: dict = {}
|
||||
if account.platform == "facebook":
|
||||
kwargs["page_id"] = account.page_id
|
||||
elif account.platform == "instagram":
|
||||
kwargs["ig_user_id"] = account.account_id or (account.extra_data or {}).get("ig_user_id")
|
||||
|
||||
try:
|
||||
scheduled.status = "publishing"
|
||||
db.commit()
|
||||
|
||||
publisher = get_publisher(account.platform, account.access_token, **kwargs)
|
||||
|
||||
# Determine publish method based on content type
|
||||
text = post.text_content or ""
|
||||
if post.hashtags:
|
||||
text = f"{text}\n\n{' '.join(post.hashtags)}"
|
||||
|
||||
if post.video_url:
|
||||
external_id = publisher.publish_video(text, post.video_url)
|
||||
elif post.image_url:
|
||||
external_id = publisher.publish_image(text, post.image_url)
|
||||
else:
|
||||
external_id = publisher.publish_text(text)
|
||||
|
||||
# Update scheduled post
|
||||
scheduled.status = "published"
|
||||
scheduled.published_at = datetime.utcnow()
|
||||
scheduled.external_post_id = external_id
|
||||
|
||||
# Update post status
|
||||
post.status = "published"
|
||||
post.updated_at = datetime.utcnow()
|
||||
|
||||
db.commit()
|
||||
db.refresh(scheduled)
|
||||
return scheduled
|
||||
|
||||
except (RuntimeError, ValueError) as e:
|
||||
scheduled.status = "failed"
|
||||
scheduled.error_message = str(e)
|
||||
db.commit()
|
||||
db.refresh(scheduled)
|
||||
raise HTTPException(status_code=502, detail=f"Publishing failed: {e}")
|
||||
176
backend/app/scheduler.py
Normal file
176
backend/app/scheduler.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""
|
||||
Background scheduler for Leopost Full.
|
||||
Handles automated content generation and post publishing.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime
|
||||
|
||||
from .database import SessionLocal
|
||||
from .models import EditorialPlan, Post, ScheduledPost, SocialAccount, SystemSetting
|
||||
from .services.content import generate_post_text, generate_hashtags
|
||||
from .services.llm import get_llm_provider
|
||||
from .services.social import get_publisher
|
||||
|
||||
logger = logging.getLogger("leopost.scheduler")
|
||||
|
||||
|
||||
def check_and_publish():
    """Publish every pending scheduled post whose time has come.

    Opens its own DB session (this runs from the background scheduler,
    outside any request). A failure on one post is logged and recorded on
    that row without blocking the rest of the batch.
    """
    db = SessionLocal()
    try:
        now = datetime.utcnow()
        due = (
            db.query(ScheduledPost)
            .filter(
                ScheduledPost.status == "pending",
                ScheduledPost.scheduled_at <= now,
            )
            .all()
        )

        for sp in due:
            try:
                _publish_single(sp, db)
            except Exception as e:
                # logger.exception keeps the stack trace; lazy %-style args
                # avoid string formatting when the log level is disabled.
                logger.exception("Failed to publish post %s", sp.id)
                sp.status = "failed"
                sp.error_message = str(e)
                db.commit()

    finally:
        db.close()
|
||||
|
||||
|
||||
def _publish_single(sp: ScheduledPost, db):
    """Publish a single scheduled post to its platform.

    State machine on ``sp.status``: pending -> publishing -> published,
    or -> failed (with ``error_message`` set) at any step. Each transition
    is committed immediately so a crash mid-publish leaves a visible state.
    Re-raises publish errors so the caller can log them.
    """
    post = db.query(Post).filter(Post.id == sp.post_id).first()
    if not post:
        # The referenced Post row is gone; mark and bail out.
        sp.status = "failed"
        sp.error_message = "Post not found"
        db.commit()
        return

    # Find social account for this character + platform
    account = (
        db.query(SocialAccount)
        .filter(
            SocialAccount.character_id == post.character_id,
            SocialAccount.platform == sp.platform,
            SocialAccount.is_active == True,
        )
        .first()
    )

    if not account:
        sp.status = "failed"
        sp.error_message = f"No active {sp.platform} account found"
        db.commit()
        return

    # Commit the intermediate state so concurrent runs / the UI can see
    # that this row is being worked on.
    sp.status = "publishing"
    db.commit()

    try:
        # Extra publisher arguments: page_id (e.g. a Facebook Page) plus any
        # platform-specific fields stashed in extra_data.
        kwargs = {}
        if account.page_id:
            kwargs["page_id"] = account.page_id
        if hasattr(account, "extra_data") and account.extra_data:
            kwargs.update(account.extra_data)

        publisher = get_publisher(sp.platform, account.access_token, **kwargs)

        # Media takes precedence: video, then image, then plain text.
        if post.video_url:
            ext_id = publisher.publish_video(post.text_content or "", post.video_url)
        elif post.image_url:
            ext_id = publisher.publish_image(post.text_content or "", post.image_url)
        else:
            text = post.text_content or ""
            if post.hashtags:
                # Hashtags are appended only for text-only posts.
                text += "\n\n" + " ".join(post.hashtags)
            ext_id = publisher.publish_text(text)

        sp.status = "published"
        sp.published_at = datetime.utcnow()
        sp.external_post_id = ext_id
        post.status = "published"
        db.commit()

    except Exception as e:
        # Record the failure on the row, then re-raise for the caller's log.
        sp.status = "failed"
        sp.error_message = str(e)
        db.commit()
        raise
|
||||
|
||||
|
||||
def generate_planned_content():
    """Generate draft content for every active editorial plan.

    Reads the LLM configuration from SystemSetting rows; if no API key is
    configured the whole run is skipped with a warning. Failures on one
    plan are logged and do not block the remaining plans.
    """
    session = SessionLocal()
    try:
        # LLM configuration lives in SystemSetting rows.
        provider_name = _get_setting(session, "llm_provider", "claude")
        api_key = _get_setting(session, "llm_api_key", "")
        model_name = _get_setting(session, "llm_model", None)

        if not api_key:
            logger.warning("No LLM API key configured, skipping content generation")
            return

        active_plans = (
            session.query(EditorialPlan)
            .filter(EditorialPlan.is_active == True)
            .all()
        )

        for plan in active_plans:
            try:
                _generate_for_plan(plan, session, provider_name, api_key, model_name)
            except Exception as exc:
                # One bad plan must not block the others.
                logger.error(f"Failed to generate for plan {plan.id}: {exc}")
    finally:
        session.close()
|
||||
|
||||
|
||||
def _generate_for_plan(plan, db, provider_name, api_key, model):
    """Generate draft posts for a single editorial plan.

    Creates one draft Post per (platform, content_type) combination in the
    plan. All posts are committed in a single transaction at the end.

    Args:
        plan: EditorialPlan row to generate for.
        db: Active SQLAlchemy session.
        provider_name: LLM provider key (e.g. "claude").
        api_key: API key for that provider.
        model: Optional model override (None uses the provider default).
    """
    # Local import to avoid a circular dependency at module load time
    # — TODO confirm; models is already imported at the top of this module.
    from .models import Character

    character = db.query(Character).filter(Character.id == plan.character_id).first()
    if not character:
        # Orphaned plan: nothing to generate.
        return

    provider = get_llm_provider(provider_name, api_key, model)
    char_dict = {
        "name": character.name,
        "niche": character.niche,
        "topics": character.topics or [],
        "tone": character.tone or "",
    }

    for platform in plan.platforms or []:
        for content_type in plan.content_types or ["text"]:
            # NOTE(review): content_type only labels the Post row here — the
            # same text/hashtag generation runs for every type; no image or
            # video is produced at this stage.
            text = generate_post_text(char_dict, provider, platform)
            hashtags = generate_hashtags(text, provider, platform)

            post = Post(
                character_id=character.id,
                content_type=content_type,
                text_content=text,
                hashtags=hashtags,
                llm_provider=provider_name,
                llm_model=model,
                platform_hint=platform,
                status="draft",
            )
            db.add(post)

    # Single commit for all generated drafts of this plan.
    db.commit()
|
||||
|
||||
|
||||
def _get_setting(db, key, default=None):
    """Return the value of the SystemSetting named *key*, or *default* if absent."""
    row = db.query(SystemSetting).filter(SystemSetting.key == key).first()
    return row.value if row else default
|
||||
320
backend/app/schemas.py
Normal file
320
backend/app/schemas.py
Normal file
@@ -0,0 +1,320 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
# === Auth ===

class LoginRequest(BaseModel):
    """Credentials payload for POST /api/auth/login."""
    username: str
    password: str


class Token(BaseModel):
    """JWT bearer token returned on successful login."""
    access_token: str
    token_type: str = "bearer"


# === Characters ===

class CharacterBase(BaseModel):
    """Shared fields for a content-creator persona ("character")."""
    name: str
    niche: str
    # Topics the character posts about; also used for affiliate matching.
    topics: list[str] = []
    tone: Optional[str] = None
    visual_style: dict = {}
    social_accounts: dict = {}
    affiliate_links: list[dict] = []
    avatar_url: Optional[str] = None
    is_active: bool = True


class CharacterCreate(CharacterBase):
    """Payload for creating a character; same fields as CharacterBase."""
    pass


class CharacterUpdate(BaseModel):
    """Partial update: all fields optional, only provided ones change."""
    name: Optional[str] = None
    niche: Optional[str] = None
    topics: Optional[list[str]] = None
    tone: Optional[str] = None
    visual_style: Optional[dict] = None
    social_accounts: Optional[dict] = None
    affiliate_links: Optional[list[dict]] = None
    avatar_url: Optional[str] = None
    is_active: Optional[bool] = None


class CharacterResponse(CharacterBase):
    """Character as returned by the API, including DB-assigned fields."""
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        # Allow building the schema directly from ORM model instances.
        from_attributes = True
|
||||
|
||||
|
||||
# === Posts / Content ===

class PostCreate(BaseModel):
    """Payload for creating a post draft for a character."""
    character_id: int
    content_type: str = "text"
    platform_hint: str = "instagram"
    text_content: Optional[str] = None
    hashtags: list[str] = []
    image_url: Optional[str] = None
    video_url: Optional[str] = None
    media_urls: list[str] = []
    affiliate_links_used: list[dict] = []
    status: str = "draft"


class PostUpdate(BaseModel):
    """Partial update for an existing post; omitted fields are untouched."""
    text_content: Optional[str] = None
    hashtags: Optional[list[str]] = None
    image_url: Optional[str] = None
    video_url: Optional[str] = None
    status: Optional[str] = None
    affiliate_links_used: Optional[list[dict]] = None


class PostResponse(BaseModel):
    """Post as returned by the API, including generation metadata."""
    id: int
    character_id: int
    content_type: str
    text_content: Optional[str] = None
    hashtags: list[str] = []
    image_url: Optional[str] = None
    video_url: Optional[str] = None
    media_urls: list[str] = []
    affiliate_links_used: list[dict] = []
    # Which LLM produced the text, when AI-generated.
    llm_provider: Optional[str] = None
    llm_model: Optional[str] = None
    platform_hint: Optional[str] = None
    status: str
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True


class GenerateContentRequest(BaseModel):
    """Request AI text generation for a character on a given platform."""
    character_id: int
    platform: str = "instagram"
    content_type: str = "text"
    topic_hint: Optional[str] = None
    include_affiliates: bool = True
    provider: Optional[str] = None  # override default LLM
    model: Optional[str] = None


class GenerateImageRequest(BaseModel):
    """Request AI image generation for a character."""
    character_id: int
    prompt: Optional[str] = None  # auto-generated if not provided
    style_hint: Optional[str] = None
    size: str = "1024x1024"
    provider: Optional[str] = None  # dalle, replicate
|
||||
|
||||
|
||||
# === Affiliate Links ===

class AffiliateLinkBase(BaseModel):
    """Shared fields for an affiliate link."""
    # None means the link is global rather than tied to one character.
    character_id: Optional[int] = None
    network: str
    name: str
    url: str
    tag: Optional[str] = None
    # Topic keywords used to match links to post content.
    topics: list[str] = []
    is_active: bool = True


class AffiliateLinkCreate(AffiliateLinkBase):
    """Payload for creating an affiliate link; same fields as the base."""
    pass


class AffiliateLinkUpdate(BaseModel):
    """Partial update: only the provided fields are changed."""
    network: Optional[str] = None
    name: Optional[str] = None
    url: Optional[str] = None
    tag: Optional[str] = None
    topics: Optional[list[str]] = None
    is_active: Optional[bool] = None


class AffiliateLinkResponse(AffiliateLinkBase):
    """Affiliate link as returned by the API, with usage stats."""
    id: int
    click_count: int = 0
    created_at: datetime

    class Config:
        from_attributes = True


# === Editorial Plans ===

class EditorialPlanBase(BaseModel):
    """Shared fields for an automated publishing plan for a character."""
    character_id: int
    name: str
    frequency: str = "daily"
    posts_per_day: int = 1
    platforms: list[str] = []
    content_types: list[str] = ["text"]
    # Times of day at which posts should go out (e.g. "09:00").
    posting_times: list[str] = ["09:00"]
    start_date: Optional[datetime] = None
    end_date: Optional[datetime] = None
    is_active: bool = False


class EditorialPlanCreate(EditorialPlanBase):
    """Payload for creating an editorial plan; same fields as the base."""
    pass


class EditorialPlanUpdate(BaseModel):
    """Partial update: only the provided fields are changed."""
    name: Optional[str] = None
    frequency: Optional[str] = None
    posts_per_day: Optional[int] = None
    platforms: Optional[list[str]] = None
    content_types: Optional[list[str]] = None
    posting_times: Optional[list[str]] = None
    start_date: Optional[datetime] = None
    end_date: Optional[datetime] = None
    is_active: Optional[bool] = None


class EditorialPlanResponse(EditorialPlanBase):
    """Editorial plan as returned by the API."""
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
# === Scheduled Posts ===

class ScheduledPostCreate(BaseModel):
    """Schedule an existing post for publication on one platform."""
    plan_id: Optional[int] = None
    post_id: int
    platform: str
    scheduled_at: datetime


class ScheduledPostResponse(BaseModel):
    """Scheduled post as returned by the API, including publish outcome."""
    id: int
    plan_id: Optional[int] = None
    post_id: int
    platform: str
    scheduled_at: datetime
    published_at: Optional[datetime] = None
    # Lifecycle: pending -> publishing -> published / failed (see scheduler).
    status: str
    error_message: Optional[str] = None
    # Platform-side id of the published post, when publishing succeeded.
    external_post_id: Optional[str] = None
    created_at: datetime

    class Config:
        from_attributes = True


# === Social Accounts ===

class SocialAccountCreate(BaseModel):
    """Connect a social platform account to a character."""
    character_id: int
    platform: str
    account_name: Optional[str] = None
    account_id: Optional[str] = None
    access_token: Optional[str] = None
    refresh_token: Optional[str] = None
    # Platform page id (passed through to the publisher when set).
    page_id: Optional[str] = None
    extra_data: dict = {}


class SocialAccountUpdate(BaseModel):
    """Partial update of a connected account's credentials/metadata."""
    account_name: Optional[str] = None
    access_token: Optional[str] = None
    refresh_token: Optional[str] = None
    page_id: Optional[str] = None
    extra_data: Optional[dict] = None
    is_active: Optional[bool] = None


class SocialAccountResponse(BaseModel):
    """Connected account as returned by the API (tokens are not exposed)."""
    id: int
    character_id: int
    platform: str
    account_name: Optional[str] = None
    account_id: Optional[str] = None
    page_id: Optional[str] = None
    is_active: bool
    token_expires_at: Optional[datetime] = None
    created_at: datetime

    class Config:
        from_attributes = True


# === Comments ===

class CommentResponse(BaseModel):
    """A comment received on a published post, with AI-reply workflow state."""
    id: int
    scheduled_post_id: Optional[int] = None
    platform: str
    external_comment_id: Optional[str] = None
    author_name: Optional[str] = None
    content: Optional[str] = None
    # Draft reply proposed by the AI, pending human review.
    ai_suggested_reply: Optional[str] = None
    approved_reply: Optional[str] = None
    reply_status: str
    replied_at: Optional[datetime] = None
    created_at: datetime

    class Config:
        from_attributes = True


class CommentAction(BaseModel):
    """Moderator decision on a comment's suggested reply."""
    action: str  # approve, edit, ignore
    reply_text: Optional[str] = None  # for edit action


# === System Settings ===

class SettingUpdate(BaseModel):
    """Upsert a single key/value system setting."""
    key: str
    value: dict | str | list | int | bool | None


class SettingResponse(BaseModel):
    """System setting as returned by the API."""
    key: str
    value: dict | str | list | int | bool | None
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True
|
||||
|
||||
|
||||
# === Editorial Calendar (from PostGenerator) ===

class CalendarGenerateRequest(BaseModel):
    """Request generation of an editorial calendar of post slots."""
    topics: list[str]
    format_narrativo: Optional[str] = None  # PAS, AIDA, BAB, Storytelling, Listicle, Dato_Implicazione
    awareness_level: Optional[int] = None  # 1-5 (Schwartz levels)
    num_posts: int = 7
    start_date: Optional[str] = None  # YYYY-MM-DD
    character_id: Optional[int] = None


class CalendarSlotResponse(BaseModel):
    """One planned post slot in the generated calendar."""
    indice: int
    topic: str
    formato_narrativo: str
    awareness_level: int
    awareness_label: str
    # ISO date (YYYY-MM-DD) on which the post should be published.
    data_pubblicazione: str
    note: Optional[str] = None


class CalendarResponse(BaseModel):
    """Full generated calendar."""
    slots: list[CalendarSlotResponse]
    totale_post: int
|
||||
0
backend/app/services/__init__.py
Normal file
0
backend/app/services/__init__.py
Normal file
168
backend/app/services/calendar_service.py
Normal file
168
backend/app/services/calendar_service.py
Normal file
@@ -0,0 +1,168 @@
|
||||
"""CalendarService — genera il calendario editoriale con awareness levels (Schwartz).
|
||||
|
||||
Versione adattata per Leopost Full (standalone, senza dipendenze da postgenerator).
|
||||
Genera un piano di pubblicazione con:
|
||||
- Formati narrativi: PAS, AIDA, BAB, Storytelling, Listicle, Dato_Implicazione
|
||||
- Awareness levels (Schwartz): 1-Unaware, 2-Problem Aware, 3-Solution Aware,
|
||||
4-Product Aware, 5-Most Aware
|
||||
- Date di pubblicazione suggerite
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date, timedelta
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Costanti
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
FORMATI_NARRATIVI = [
|
||||
"PAS",
|
||||
"AIDA",
|
||||
"BAB",
|
||||
"Storytelling",
|
||||
"Listicle",
|
||||
"Dato_Implicazione",
|
||||
]
|
||||
|
||||
AWARENESS_LEVELS = {
|
||||
1: "Unaware",
|
||||
2: "Problem Aware",
|
||||
3: "Solution Aware",
|
||||
4: "Product Aware",
|
||||
5: "Most Aware",
|
||||
}
|
||||
|
||||
# Mapping formato narrativo -> awareness levels consigliati
|
||||
_FORMATO_TO_LEVELS: dict[str, list[int]] = {
|
||||
"PAS": [2, 3],
|
||||
"AIDA": [3, 4, 5],
|
||||
"BAB": [2, 3],
|
||||
"Storytelling": [1, 2],
|
||||
"Listicle": [2, 3],
|
||||
"Dato_Implicazione": [1, 2, 3],
|
||||
}
|
||||
|
||||
# Distribuzione default per generazione automatica
|
||||
_DEFAULT_DISTRIBUTION = [
|
||||
("Storytelling", 1),
|
||||
("Dato_Implicazione", 2),
|
||||
("PAS", 2),
|
||||
("Listicle", 3),
|
||||
("AIDA", 4),
|
||||
("BAB", 3),
|
||||
("AIDA", 5),
|
||||
]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# CalendarService
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class CalendarService:
|
||||
"""Genera il calendario editoriale con awareness levels e formati narrativi."""
|
||||
|
||||
def generate_calendar(
|
||||
self,
|
||||
topics: list[str],
|
||||
num_posts: int = 7,
|
||||
format_narrativo: Optional[str] = None,
|
||||
awareness_level: Optional[int] = None,
|
||||
start_date: Optional[str] = None,
|
||||
) -> list[dict]:
|
||||
"""Genera un calendario editoriale."""
|
||||
if start_date:
|
||||
try:
|
||||
data_inizio = date.fromisoformat(start_date)
|
||||
except ValueError:
|
||||
data_inizio = date.today()
|
||||
else:
|
||||
data_inizio = date.today()
|
||||
|
||||
dates = self._generate_dates(data_inizio, num_posts)
|
||||
|
||||
if format_narrativo and awareness_level:
|
||||
distribution = [(format_narrativo, awareness_level)] * num_posts
|
||||
elif format_narrativo:
|
||||
levels = _FORMATO_TO_LEVELS.get(format_narrativo, [2, 3, 4])
|
||||
distribution = [
|
||||
(format_narrativo, levels[i % len(levels)])
|
||||
for i in range(num_posts)
|
||||
]
|
||||
elif awareness_level:
|
||||
compatible_formats = [
|
||||
fmt for fmt, levels in _FORMATO_TO_LEVELS.items()
|
||||
if awareness_level in levels
|
||||
]
|
||||
if not compatible_formats:
|
||||
compatible_formats = FORMATI_NARRATIVI
|
||||
distribution = [
|
||||
(compatible_formats[i % len(compatible_formats)], awareness_level)
|
||||
for i in range(num_posts)
|
||||
]
|
||||
else:
|
||||
distribution = [
|
||||
_DEFAULT_DISTRIBUTION[i % len(_DEFAULT_DISTRIBUTION)]
|
||||
for i in range(num_posts)
|
||||
]
|
||||
|
||||
slots = []
|
||||
for i in range(num_posts):
|
||||
topic = topics[i % len(topics)] if topics else f"Topic {i + 1}"
|
||||
fmt, level = distribution[i]
|
||||
|
||||
slots.append({
|
||||
"indice": i,
|
||||
"topic": topic,
|
||||
"formato_narrativo": fmt,
|
||||
"awareness_level": level,
|
||||
"awareness_label": AWARENESS_LEVELS.get(level, f"Level {level}"),
|
||||
"data_pubblicazione": dates[i].isoformat(),
|
||||
"note": self._generate_note(fmt, level),
|
||||
})
|
||||
|
||||
return slots
|
||||
|
||||
def get_formats(self) -> list[dict]:
|
||||
"""Ritorna la lista dei formati narrativi disponibili."""
|
||||
return [
|
||||
{
|
||||
"value": fmt,
|
||||
"label": fmt.replace("_", " "),
|
||||
"awareness_levels": _FORMATO_TO_LEVELS.get(fmt, [2, 3]),
|
||||
}
|
||||
for fmt in FORMATI_NARRATIVI
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def _generate_dates(start: date, count: int) -> list[date]:
|
||||
"""Genera date di pubblicazione (lun, mer, ven)."""
|
||||
publish_days = [0, 2, 4]
|
||||
dates = []
|
||||
current = start
|
||||
|
||||
while current.weekday() not in publish_days:
|
||||
current += timedelta(days=1)
|
||||
|
||||
while len(dates) < count:
|
||||
if current.weekday() in publish_days:
|
||||
dates.append(current)
|
||||
current += timedelta(days=1)
|
||||
|
||||
return dates
|
||||
|
||||
@staticmethod
|
||||
def _generate_note(formato: str, level: int) -> str:
|
||||
"""Genera una nota descrittiva per lo slot."""
|
||||
level_label = AWARENESS_LEVELS.get(level, f"L{level}")
|
||||
notes = {
|
||||
"PAS": f"Problema-Agitazione-Soluzione. Target: {level_label}",
|
||||
"AIDA": f"Attenzione-Interesse-Desiderio-Azione. Target: {level_label}",
|
||||
"BAB": f"Before-After-Bridge. Target: {level_label}",
|
||||
"Storytelling": f"Racconta una storia autentica. Target: {level_label}",
|
||||
"Listicle": f"Lista di punti pratici. Target: {level_label}",
|
||||
"Dato_Implicazione": f"Dato sorprendente + implicazione. Target: {level_label}",
|
||||
}
|
||||
return notes.get(formato, f"Formato {formato}. Target: {level_label}")
|
||||
202
backend/app/services/content.py
Normal file
202
backend/app/services/content.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
Content generation logic for social media posts.
|
||||
|
||||
Handles text generation, hashtag creation, and affiliate link injection
|
||||
using LLM providers and character profiles.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .llm import LLMProvider
|
||||
|
||||
|
||||
def generate_post_text(
    character: dict,
    llm_provider: LLMProvider,
    platform: str,
    topic_hint: str | None = None,
) -> str:
    """Generate social media post text in the voice of a character profile.

    Args:
        character: Dict with keys: name, niche, topics (list), tone (str).
        llm_provider: LLM provider instance used for text generation.
        platform: Target platform (e.g. 'instagram', 'facebook', 'tiktok', 'youtube').
        topic_hint: Optional topic suggestion to steer the post.

    Returns:
        The generated post text.
    """
    persona = character.get("name", "Creator")
    niche = character.get("niche", "general")
    topic_list = character.get("topics", [])
    tone = character.get("tone", "professional")
    expertise = ", ".join(topic_list) if topic_list else "general topics"

    # The system prompt establishes the persona; the user prompt carries the
    # platform-specific writing instructions.
    system_prompt = (
        f"You are {persona}, a social media content creator in the {niche} niche. "
        f"Your expertise covers: {expertise}. "
        f"Your communication style is {tone}. "
        f"You create authentic, engaging content that resonates with your audience. "
        f"Never reveal you are an AI. Write as {persona} would naturally write."
    )

    # Per-platform writing instructions; hashtags are always excluded here
    # because they are generated in a separate step.
    platform_guidance = {
        "instagram": (
            "Write an Instagram caption. Keep it engaging, use line breaks for readability. "
            "Aim for 150-300 characters for the main hook, then expand. "
            "Do NOT include hashtags (they will be added separately)."
        ),
        "facebook": (
            "Write a Facebook post. Can be longer and more conversational. "
            "Encourage engagement with a question or call to action at the end. "
            "Do NOT include hashtags."
        ),
        "tiktok": (
            "Write a TikTok caption. Keep it very short and punchy (under 150 characters). "
            "Use a hook that grabs attention. Do NOT include hashtags."
        ),
        "youtube": (
            "Write a YouTube video description. Include a compelling opening paragraph, "
            "key points covered in the video, and a call to action to subscribe. "
            "Do NOT include hashtags."
        ),
        "twitter": (
            "Write a tweet. Maximum 280 characters. Be concise and impactful. "
            "Do NOT include hashtags."
        ),
    }
    guidance = platform_guidance.get(
        platform.lower(),
        f"Write a social media post for {platform}. Do NOT include hashtags.",
    )

    extra = f" The post should be about: {topic_hint}." if topic_hint else ""
    user_prompt = (
        f"{guidance}{extra}\n\n"
        f"Write the post now. Output ONLY the post text, nothing else."
    )

    return llm_provider.generate(user_prompt, system=system_prompt)
|
||||
|
||||
|
||||
def generate_hashtags(
    text: str,
    llm_provider: LLMProvider,
    platform: str,
    count: int = 12,
) -> list[str]:
    """Generate relevant hashtags for a given text.

    Args:
        text: The post text to generate hashtags for.
        llm_provider: LLM provider instance.
        platform: Target platform.
        count: Number of hashtags to generate.

    Returns:
        List of unique hashtag strings (each prefixed with #).
    """
    # Per-platform caps on the number of hashtags.
    platform_limits = {
        "instagram": 30,
        "tiktok": 5,
        "twitter": 3,
        "facebook": 5,
        "youtube": 15,
    }
    max_tags = min(count, platform_limits.get(platform.lower(), count))

    system_prompt = (
        "You are a social media hashtag strategist. You generate relevant, "
        "effective hashtags that maximize reach and engagement."
    )

    prompt = (
        f"Generate exactly {max_tags} hashtags for the following {platform} post.\n\n"
        f"Post text:\n{text}\n\n"
        f"Rules:\n"
        f"- Mix popular (high reach) and niche (targeted) hashtags\n"
        f"- Each hashtag must start with #\n"
        f"- No spaces within hashtags, use CamelCase for multi-word\n"
        f"- Output ONLY the hashtags, one per line, nothing else"
    )

    result = llm_provider.generate(prompt, system=system_prompt)

    # Parse hashtags from the response. LLMs sometimes ignore the "one per
    # line" rule and return numbered or bulleted lists ("1. #Tag", "- tag"),
    # so prefer the first explicit #token on each line instead of blindly
    # prefixing "#" to the whole line (which previously produced junk like
    # "#1." for numbered lists).
    hashtags: list[str] = []
    for line in result.strip().splitlines():
        line = line.strip()
        if not line:
            continue
        tag = next((word for word in line.split() if word.startswith("#")), None)
        if tag is None:
            # No explicit hashtag on the line: strip leading list markers and
            # use the first remaining word as the tag body.
            stripped = line.lstrip("-*\u20220123456789.) ")
            if not stripped:
                continue
            tag = f"#{stripped.split()[0]}"
        # Drop trailing punctuation the model may append; skip bare "#"
        # and duplicates.
        tag = tag.rstrip(".,;:!?")
        if len(tag) > 1 and tag not in hashtags:
            hashtags.append(tag)

    return hashtags[:max_tags]
|
||||
|
||||
|
||||
def inject_affiliate_links(
    text: str,
    affiliate_links: list[dict],
    topics: list[str],
) -> tuple[str, list[dict]]:
    """Append the most relevant affiliate links to *text*.

    Relevance is keyword/topic overlap; at most the two best-matching links
    are appended at the end, one per line as "label: url". Ties keep the
    original link order.

    Args:
        text: Original post text.
        affiliate_links: Dicts with keys:
            - url (str): The affiliate URL
            - label (str): Display text for the link
            - keywords (list[str]): Topic keywords this link is relevant for
        topics: Current post topics to match against.

    Returns:
        (modified_text, links_used) tuple; links_used is the list of
        affiliate link dicts that were appended (possibly empty, in which
        case the text is returned unchanged).
    """
    if not affiliate_links or not topics:
        return text, []

    # Case-insensitive topic matching.
    wanted = {topic.lower() for topic in topics}

    ranked: list[tuple[int, dict]] = []
    for candidate in affiliate_links:
        candidate_keywords = {kw.lower() for kw in candidate.get("keywords", [])}
        score = len(wanted & candidate_keywords)
        if score > 0:
            ranked.append((score, candidate))

    if not ranked:
        return text, []

    # Most overlap first; Python's sort is stable so ties keep input order.
    ranked.sort(key=lambda pair: pair[0], reverse=True)
    chosen = [link for _, link in ranked[:2]]

    footer = "\n".join(
        f"{link.get('label', 'Check this out')}: {link.get('url', '')}"
        for link in chosen
    )
    return f"{text}\n\n{footer}", chosen
|
||||
181
backend/app/services/images.py
Normal file
181
backend/app/services/images.py
Normal file
@@ -0,0 +1,181 @@
|
||||
"""
|
||||
Image generation abstraction layer.
|
||||
|
||||
Supports DALL-E (OpenAI) and Replicate (Stability AI SDXL) for image generation.
|
||||
"""
|
||||
|
||||
import time
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
import httpx
|
||||
|
||||
# HTTP timeout in seconds; image generation is slow, so be generous.
TIMEOUT = 120.0
# Seconds to wait between Replicate status polls.
POLL_INTERVAL = 2.0
# Poll attempts before giving up (~2 minutes at POLL_INTERVAL).
MAX_POLL_ATTEMPTS = 60
|
||||
|
||||
|
||||
class ImageProvider(ABC):
    """Abstract base class for image generation providers.

    Concrete subclasses supply a default model in __init__ and implement
    generate(); callers interact only through this interface.
    """

    def __init__(self, api_key: str, model: str | None = None):
        # Credentials and model identifier shared by all concrete providers.
        self.api_key = api_key
        self.model = model

    @abstractmethod
    def generate(self, prompt: str, size: str = "1024x1024") -> str:
        """Generate an image from a text prompt.

        Args:
            prompt: Text description of the image to generate.
            size: Image dimensions as 'WIDTHxHEIGHT' string.

        Returns:
            URL of the generated image.
        """
        ...
|
||||
|
||||
|
||||
class DallEProvider(ImageProvider):
    """OpenAI DALL-E 3 image generation provider."""

    API_URL = "https://api.openai.com/v1/images/generations"

    def __init__(self, api_key: str, model: str | None = None):
        super().__init__(api_key, model or "dall-e-3")

    def generate(self, prompt: str, size: str = "1024x1024") -> str:
        """Request a single image from the DALL-E API and return its URL.

        Raises:
            RuntimeError: on HTTP error responses or transport failures.
        """
        request_body = {
            "model": self.model,
            "prompt": prompt,
            "n": 1,
            "size": size,
            # Ask for a hosted URL rather than base64 data.
            "response_format": "url",
        }
        auth_headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                resp = client.post(self.API_URL, headers=auth_headers, json=request_body)
                resp.raise_for_status()
                return resp.json()["data"][0]["url"]
        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"DALL-E API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"DALL-E API request failed: {e}") from e
|
||||
|
||||
|
||||
class ReplicateProvider(ImageProvider):
    """Replicate image generation provider using Stability AI SDXL."""

    API_URL = "https://api.replicate.com/v1/predictions"

    def __init__(self, api_key: str, model: str | None = None):
        super().__init__(api_key, model or "stability-ai/sdxl:latest")

    def generate(self, prompt: str, size: str = "1024x1024") -> str:
        """Create a Replicate prediction and poll it until an image URL is ready.

        Args:
            prompt: Text description of the image to generate.
            size: Image dimensions as 'WIDTHxHEIGHT'; malformed values fall
                back to 1024x1024.

        Returns:
            URL of the generated image.

        Raises:
            RuntimeError: on API errors, failed/canceled predictions,
                unexpected output shapes, or polling timeout.
        """
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }

        # Parse size into width and height
        try:
            # A wrong token count or non-numeric part both raise ValueError,
            # triggering the 1024x1024 fallback below.
            width, height = (int(d) for d in size.split("x"))
        except ValueError:
            width, height = 1024, 1024

        # Determine the version string from the model
        # Replicate expects "owner/model:version" or uses the version hash directly
        version = self.model
        payload = {
            "version": version,
            "input": {
                "prompt": prompt,
                "width": width,
                "height": height,
            },
        }

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                # Create prediction
                response = client.post(self.API_URL, headers=headers, json=payload)
                response.raise_for_status()
                prediction = response.json()

                # Prefer the self-link the API returns; otherwise build the
                # polling URL from the prediction id.
                prediction_url = prediction.get("urls", {}).get("get")
                if not prediction_url:
                    prediction_id = prediction.get("id")
                    prediction_url = f"{self.API_URL}/{prediction_id}"

                # Poll for completion
                for _ in range(MAX_POLL_ATTEMPTS):
                    poll_response = client.get(prediction_url, headers=headers)
                    poll_response.raise_for_status()
                    result = poll_response.json()
                    status = result.get("status")

                    if status == "succeeded":
                        # Output may be a list of URLs or a single URL string.
                        output = result.get("output")
                        if isinstance(output, list) and output:
                            return output[0]
                        if isinstance(output, str):
                            return output
                        raise RuntimeError(
                            f"Replicate returned unexpected output format: {output}"
                        )

                    if status == "failed":
                        error = result.get("error", "Unknown error")
                        raise RuntimeError(f"Replicate prediction failed: {error}")

                    if status == "canceled":
                        raise RuntimeError("Replicate prediction was canceled")

                    # Still starting/processing: wait and poll again.
                    time.sleep(POLL_INTERVAL)

                raise RuntimeError(
                    "Replicate prediction timed out after polling"
                )

        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"Replicate API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"Replicate API request failed: {e}") from e
|
||||
|
||||
|
||||
def get_image_provider(
    provider_name: str, api_key: str, model: str | None = None
) -> ImageProvider:
    """Factory returning an image generation provider instance.

    Args:
        provider_name: One of 'dalle', 'replicate' (case-insensitive).
        api_key: API key for the provider.
        model: Optional model override. Uses the provider default if omitted.

    Returns:
        An ImageProvider instance.

    Raises:
        ValueError: If provider_name is not supported.
    """
    registry = {
        "dalle": DallEProvider,
        "replicate": ReplicateProvider,
    }
    factory = registry.get(provider_name.lower())
    if factory is None:
        supported = ", ".join(registry.keys())
        raise ValueError(
            f"Unknown image provider '{provider_name}'. Supported: {supported}"
        )
    return factory(api_key=api_key, model=model)
|
||||
194
backend/app/services/llm.py
Normal file
194
backend/app/services/llm.py
Normal file
@@ -0,0 +1,194 @@
|
||||
"""
|
||||
Multi-LLM abstraction layer.
|
||||
|
||||
Supports Claude (Anthropic), OpenAI, and Gemini via direct HTTP calls using httpx.
|
||||
Each provider implements the same interface for text generation.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
import httpx
|
||||
|
||||
# Default models per provider; overridable per-instance via the `model`
# argument of each provider's constructor.
DEFAULT_MODELS = {
    "claude": "claude-sonnet-4-20250514",
    "openai": "gpt-4o-mini",
    "gemini": "gemini-2.0-flash",
}

# Per-request timeout in seconds, shared by all provider HTTP calls.
TIMEOUT = 60.0
|
||||
|
||||
|
||||
class LLMProvider(ABC):
|
||||
"""Abstract base class for LLM providers."""
|
||||
|
||||
def __init__(self, api_key: str, model: str | None = None):
|
||||
self.api_key = api_key
|
||||
self.model = model
|
||||
|
||||
@abstractmethod
|
||||
def generate(self, prompt: str, system: str = "") -> str:
|
||||
"""Generate text from a prompt.
|
||||
|
||||
Args:
|
||||
prompt: The user prompt / message.
|
||||
system: Optional system prompt for context and behavior.
|
||||
|
||||
Returns:
|
||||
Generated text string.
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
class ClaudeProvider(LLMProvider):
    """Anthropic Claude provider (Messages API)."""

    API_URL = "https://api.anthropic.com/v1/messages"

    def __init__(self, api_key: str, model: str | None = None):
        super().__init__(api_key, model or DEFAULT_MODELS["claude"])

    def generate(self, prompt: str, system: str = "") -> str:
        """Generate a completion; raises RuntimeError on HTTP/transport failure."""
        request_headers = {
            "x-api-key": self.api_key,
            "anthropic-version": "2023-06-01",
            "content-type": "application/json",
        }
        body: dict = {
            "model": self.model,
            "max_tokens": 2048,
            "messages": [{"role": "user", "content": prompt}],
        }
        if system:
            body["system"] = system

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                resp = client.post(self.API_URL, headers=request_headers, json=body)
                resp.raise_for_status()
                data = resp.json()
                # The Messages API returns a list of content blocks; keep only
                # the text blocks and concatenate them in order.
                pieces = [
                    block.get("text", "")
                    for block in data.get("content", [])
                    if block.get("type") == "text"
                ]
                return "".join(pieces)
        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"Claude API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"Claude API request failed: {e}") from e
|
||||
|
||||
|
||||
class OpenAIProvider(LLMProvider):
    """OpenAI provider (Chat Completions API)."""

    API_URL = "https://api.openai.com/v1/chat/completions"

    def __init__(self, api_key: str, model: str | None = None):
        super().__init__(api_key, model or DEFAULT_MODELS["openai"])

    def generate(self, prompt: str, system: str = "") -> str:
        """Generate a completion; raises RuntimeError on HTTP/transport failure."""
        request_headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }
        # The optional system message precedes the user turn.
        conversation: list[dict] = (
            [{"role": "system", "content": system}] if system else []
        )
        conversation.append({"role": "user", "content": prompt})

        body = {
            "model": self.model,
            "messages": conversation,
            "max_tokens": 2048,
        }

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                resp = client.post(self.API_URL, headers=request_headers, json=body)
                resp.raise_for_status()
                return resp.json()["choices"][0]["message"]["content"]
        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"OpenAI API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"OpenAI API request failed: {e}") from e
|
||||
|
||||
|
||||
class GeminiProvider(LLMProvider):
    """Google Gemini provider via Generative Language API (v1beta).

    The system prompt, when given, is sent through the API's dedicated
    ``systemInstruction`` field instead of being concatenated onto the user
    prompt, so the model treats it with system-level priority.
    """

    API_BASE = "https://generativelanguage.googleapis.com/v1beta/models"

    def __init__(self, api_key: str, model: str | None = None):
        super().__init__(api_key, model or DEFAULT_MODELS["gemini"])

    def generate(self, prompt: str, system: str = "") -> str:
        """Generate text with Gemini.

        Args:
            prompt: The user prompt / message.
            system: Optional system prompt for context and behavior.

        Returns:
            Generated text string (empty if no candidates were returned,
            e.g. when safety filters blocked the response).

        Raises:
            RuntimeError: On HTTP errors or transport failures.
        """
        url = f"{self.API_BASE}/{self.model}:generateContent"
        params = {"key": self.api_key}
        headers = {"Content-Type": "application/json"}

        payload: dict = {
            "contents": [{"parts": [{"text": prompt}]}],
            "generationConfig": {
                "maxOutputTokens": 2048,
            },
        }
        # Fix: previously the system prompt was prepended to the user text;
        # the API supports a first-class systemInstruction field for this.
        if system:
            payload["systemInstruction"] = {"parts": [{"text": system}]}

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                response = client.post(url, params=params, headers=headers, json=payload)
                response.raise_for_status()
                data = response.json()
                candidates = data.get("candidates", [])
                if not candidates:
                    return ""
                content = candidates[0].get("content", {})
                parts_out = content.get("parts", [])
                return "".join(part.get("text", "") for part in parts_out)
        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"Gemini API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"Gemini API request failed: {e}") from e
|
||||
|
||||
|
||||
def get_llm_provider(
    provider_name: str, api_key: str, model: str | None = None
) -> LLMProvider:
    """Factory returning an LLM provider instance.

    Args:
        provider_name: One of 'claude', 'openai', 'gemini' (case-insensitive).
        api_key: API key for the provider.
        model: Optional model override. Uses the provider default if omitted.

    Returns:
        An LLMProvider instance.

    Raises:
        ValueError: If provider_name is not supported.
    """
    registry: dict[str, type[LLMProvider]] = {
        "claude": ClaudeProvider,
        "openai": OpenAIProvider,
        "gemini": GeminiProvider,
    }
    key = provider_name.lower()
    if key not in registry:
        supported = ", ".join(registry)
        raise ValueError(
            f"Unknown LLM provider '{provider_name}'. Supported: {supported}"
        )
    return registry[key](api_key=api_key, model=model)
|
||||
169
backend/app/services/prompt_service.py
Normal file
169
backend/app/services/prompt_service.py
Normal file
@@ -0,0 +1,169 @@
|
||||
"""PromptService — carica, lista e compila prompt .txt con variabili.
|
||||
|
||||
Gestisce i file .txt dei prompt LLM nella directory PROMPTS_PATH.
|
||||
Usa la sintassi {{variabile}} per i placeholder (doppia graffa).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# Pattern per trovare le variabili {{nome}} nei template
|
||||
_VARIABLE_PATTERN = re.compile(r"\{\{(\w+)\}\}")
|
||||
|
||||
|
||||
class PromptService:
    """Service managing the .txt LLM prompt templates.

    Provides methods to:
    - list the available prompts
    - load a prompt's raw content
    - compile a prompt, substituting the {{...}} variables
    - save a prompt (for the Phase 2 editor)
    - extract the list of variables a template requires

    Security: every name is validated in ``_get_path`` (letters, digits,
    underscore, hyphen only), so no public method can be used for path
    traversal (e.g. ``load_prompt("../secret")``).
    """

    # Pattern matching {{variable}} placeholders inside templates.
    # Kept on the class so the service is self-contained.
    _VARIABLE_PATTERN = re.compile(r"\{\{(\w+)\}\}")

    # Allowed prompt names: letters, digits, underscore, hyphen.
    _NAME_PATTERN = re.compile(r"^[\w\-]+$")

    def __init__(self, prompts_dir: Path) -> None:
        """Initialize the service with the prompts directory.

        Args:
            prompts_dir: Path to the directory holding the .txt prompt files.
                Typically PROMPTS_PATH from backend.config.

        Raises:
            FileNotFoundError: If the directory does not exist.
            NotADirectoryError: If the path is not a directory.
        """
        if not prompts_dir.exists():
            raise FileNotFoundError(
                f"Directory prompt non trovata: {prompts_dir}. "
                "Verifica che PROMPTS_PATH sia configurato correttamente."
            )
        if not prompts_dir.is_dir():
            raise NotADirectoryError(
                f"Il percorso non è una directory: {prompts_dir}"
            )
        self._prompts_dir = prompts_dir

    def list_prompts(self) -> list[str]:
        """List all available .txt prompts in the directory.

        Returns:
            Alphabetically sorted list of file names without extension,
            e.g. ['aida_promozione', 'bab_storytelling', 'system_prompt', ...].
        """
        return sorted(
            p.stem for p in self._prompts_dir.glob("*.txt") if p.is_file()
        )

    def load_prompt(self, name: str) -> str:
        """Load the raw content of a prompt .txt file.

        Args:
            name: Prompt name without extension (e.g. "pas_valore").

        Returns:
            Text content of the prompt file.

        Raises:
            ValueError: If the name contains unsafe characters.
            FileNotFoundError: If the file does not exist.
        """
        path = self._get_path(name)
        if not path.exists():
            available = self.list_prompts()
            raise FileNotFoundError(
                f"Prompt '{name}' non trovato in {self._prompts_dir}. "
                f"Prompt disponibili: {available}"
            )
        return path.read_text(encoding="utf-8")

    def compile_prompt(self, name: str, variables: dict[str, str]) -> str:
        """Load a prompt and substitute every {{name}} variable.

        Args:
            name: Prompt name without extension.
            variables: Mapping {variable_name: value}.

        Returns:
            Prompt text with all variables substituted.

        Raises:
            FileNotFoundError: If the prompt does not exist.
            ValueError: If a template variable has no entry in the dict.
        """
        template = self.load_prompt(name)

        # Fail fast when the template requires variables the caller
        # did not supply.
        required = set(self._VARIABLE_PATTERN.findall(template))
        missing = required - set(variables.keys())
        if missing:
            raise ValueError(
                f"Variabili mancanti per il prompt '{name}': {sorted(missing)}. "
                f"Fornire: {sorted(required)}"
            )

        # Function replacement: values are inserted literally, so backslashes
        # and group references in values are safe.
        def replace_var(match: re.Match) -> str:
            return variables[match.group(1)]

        return self._VARIABLE_PATTERN.sub(replace_var, template)

    def save_prompt(self, name: str, content: str) -> None:
        """Save prompt content to its .txt file.

        Used by the Phase 2 prompt editor.

        Args:
            name: Prompt name without extension.
            content: Text content to save.

        Raises:
            ValueError: If the name contains unsafe characters.
        """
        # Name validation happens in _get_path.
        path = self._get_path(name)
        path.write_text(content, encoding="utf-8")

    def get_required_variables(self, name: str) -> list[str]:
        """Return the sorted list of variables a template requires.

        Args:
            name: Prompt name without extension.

        Returns:
            Sorted list of variable names (without braces), e.g.
            ['brand_name', 'livello_schwartz', 'obiettivo_campagna',
             'target_nicchia', 'topic'].

        Raises:
            FileNotFoundError: If the prompt does not exist.
        """
        template = self.load_prompt(name)
        return sorted(set(self._VARIABLE_PATTERN.findall(template)))

    def prompt_exists(self, name: str) -> bool:
        """Return True if the prompt file exists.

        Args:
            name: Prompt name without extension.
        """
        return self._get_path(name).exists()

    # ---------------------------------------------------------------------------
    # Private methods
    # ---------------------------------------------------------------------------

    def _get_path(self, name: str) -> Path:
        """Build the full path for a prompt file, validating the name.

        Security fix: validation used to live only in save_prompt, leaving
        load_prompt/compile_prompt/prompt_exists open to path traversal
        (e.g. name='../other'); it is now enforced for every lookup.
        """
        if not self._NAME_PATTERN.match(name):
            raise ValueError(
                f"Nome prompt non valido: '{name}'. "
                "Usa solo lettere, cifre, underscore e trattino."
            )
        return self._prompts_dir / f"{name}.txt"
|
||||
706
backend/app/services/social.py
Normal file
706
backend/app/services/social.py
Normal file
@@ -0,0 +1,706 @@
|
||||
"""
|
||||
Social media publishing abstraction layer.
|
||||
|
||||
Supports Facebook, Instagram, YouTube, and TikTok via their respective APIs.
|
||||
All HTTP calls use httpx (sync).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
import httpx
|
||||
|
||||
# Default timeout in seconds for ordinary API calls.
TIMEOUT = 120.0
# Longer timeout in seconds for media (video/file) uploads.
UPLOAD_TIMEOUT = 300.0
|
||||
|
||||
|
||||
class SocialPublisher(ABC):
    """Interface implemented by every social-network publishing backend."""

    @abstractmethod
    def publish_text(self, text: str, **kwargs) -> str:
        """Post plain text.

        Returns:
            The platform's external post ID.
        """
        ...

    @abstractmethod
    def publish_image(self, text: str, image_url: str, **kwargs) -> str:
        """Post an image with *text* as caption.

        Returns:
            The platform's external post ID.
        """
        ...

    @abstractmethod
    def publish_video(self, text: str, video_path: str, **kwargs) -> str:
        """Post a video with *text* as caption.

        Returns:
            The platform's external post ID.
        """
        ...

    @abstractmethod
    def get_comments(self, post_id: str) -> list[dict]:
        """Fetch comments for a post.

        Returns:
            List of dicts carrying at least 'id', 'text' and 'author' keys.
        """
        ...

    @abstractmethod
    def reply_to_comment(self, comment_id: str, text: str) -> bool:
        """Answer a specific comment.

        Returns:
            True on success, False otherwise.
        """
        ...
|
||||
|
||||
|
||||
class FacebookPublisher(SocialPublisher):
    """Facebook Page publishing via Graph API.

    Required API setup:
    - Create a Facebook App at https://developers.facebook.com
    - Request 'pages_manage_posts', 'pages_read_engagement' permissions
    - Get a Page Access Token (long-lived recommended)
    - The page_id is the Facebook Page ID (numeric)
    """

    GRAPH_API_BASE = "https://graph.facebook.com/v18.0"

    def __init__(self, access_token: str, page_id: str):
        self.access_token = access_token
        self.page_id = page_id

    def _request(
        self,
        method: str,
        endpoint: str,
        params: dict | None = None,
        data: dict | None = None,
        files: dict | None = None,
        timeout: float = TIMEOUT,
    ) -> dict:
        """Issue one Graph API call and return the decoded JSON body."""
        url = f"{self.GRAPH_API_BASE}{endpoint}"
        if params is None:
            params = {}
        # The page access token travels as a query parameter on every call.
        params["access_token"] = self.access_token

        try:
            with httpx.Client(timeout=timeout) as client:
                if method == "GET":
                    resp = client.get(url, params=params)
                elif files:
                    # Multipart upload path (video files).
                    resp = client.post(url, params=params, data=data, files=files)
                else:
                    resp = client.post(url, params=params, json=data)
                resp.raise_for_status()
                return resp.json()
        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"Facebook API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"Facebook API request failed: {e}") from e

    def publish_text(self, text: str, **kwargs) -> str:
        """Post *text* to the page feed; return the post ID ('' if absent)."""
        response = self._request(
            "POST", f"/{self.page_id}/feed", data={"message": text}
        )
        return response.get("id", "")

    def publish_image(self, text: str, image_url: str, **kwargs) -> str:
        """Post a photo (by URL) with *text* as message; return the post ID."""
        response = self._request(
            "POST",
            f"/{self.page_id}/photos",
            data={"message": text, "url": image_url},
        )
        return response.get("id", "")

    def publish_video(self, text: str, video_path: str, **kwargs) -> str:
        """Upload a local video file with *text* as description."""
        with open(video_path, "rb") as fh:
            response = self._request(
                "POST",
                f"/{self.page_id}/videos",
                data={"description": text},
                files={"source": ("video.mp4", fh, "video/mp4")},
                timeout=UPLOAD_TIMEOUT,
            )
        return response.get("id", "")

    def get_comments(self, post_id: str) -> list[dict]:
        """Return normalized comment dicts for the given post."""
        response = self._request("GET", f"/{post_id}/comments")
        return [
            {
                "id": entry.get("id", ""),
                "text": entry.get("message", ""),
                "author": entry.get("from", {}).get("name", "Unknown"),
                "created_at": entry.get("created_time", ""),
            }
            for entry in response.get("data", [])
        ]

    def reply_to_comment(self, comment_id: str, text: str) -> bool:
        """Reply to a comment; best-effort, returns False on any API error."""
        try:
            self._request("POST", f"/{comment_id}/comments", data={"message": text})
        except RuntimeError:
            return False
        return True
|
||||
|
||||
|
||||
class InstagramPublisher(SocialPublisher):
    """Instagram publishing via Instagram Graph API (Business/Creator accounts).

    Required API setup:
    - Facebook App with Instagram Graph API enabled
    - Instagram Business or Creator account linked to a Facebook Page
    - Permissions: 'instagram_basic', 'instagram_content_publish'
    - ig_user_id is the Instagram Business Account ID (from Facebook Graph API)
    - Note: Text-only posts are not supported by Instagram API
    """

    GRAPH_API_BASE = "https://graph.facebook.com/v18.0"

    def __init__(self, access_token: str, ig_user_id: str):
        # Long-lived access token and the Instagram Business Account ID.
        self.access_token = access_token
        self.ig_user_id = ig_user_id

    def _request(
        self,
        method: str,
        endpoint: str,
        params: dict | None = None,
        data: dict | None = None,
        timeout: float = TIMEOUT,
    ) -> dict:
        """Make a request to the Instagram Graph API.

        Wraps httpx errors in RuntimeError so callers handle one exception type.
        """
        url = f"{self.GRAPH_API_BASE}{endpoint}"
        if params is None:
            params = {}
        # The access token is passed as a query parameter on every call.
        params["access_token"] = self.access_token

        try:
            with httpx.Client(timeout=timeout) as client:
                if method == "GET":
                    response = client.get(url, params=params)
                else:
                    response = client.post(url, params=params, json=data)
                response.raise_for_status()
                return response.json()
        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"Instagram API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"Instagram API request failed: {e}") from e

    def publish_text(self, text: str, **kwargs) -> str:
        """Instagram does not support text-only posts.

        Raises RuntimeError. Use publish_image or publish_video instead.
        """
        raise RuntimeError(
            "Instagram does not support text-only posts. "
            "Use publish_image() or publish_video() instead."
        )

    def publish_image(self, text: str, image_url: str, **kwargs) -> str:
        """Publish an image post via the API's two-step container flow.

        Returns the published media ID ('' if the API omitted it).
        """
        # Step 1: Create media container
        container_data = {
            "image_url": image_url,
            "caption": text,
        }
        container = self._request(
            "POST", f"/{self.ig_user_id}/media", data=container_data
        )
        container_id = container.get("id", "")

        if not container_id:
            raise RuntimeError("Failed to create Instagram media container")

        # Step 2: Publish the container
        publish_data = {"creation_id": container_id}
        result = self._request(
            "POST", f"/{self.ig_user_id}/media_publish", data=publish_data
        )
        return result.get("id", "")

    def publish_video(self, text: str, video_path: str, **kwargs) -> str:
        """Publish a video as a Reel on Instagram.

        Note: video_path should be a publicly accessible URL for Instagram API.
        For local files, upload to a hosting service first and pass the URL.
        """
        # Prefer an explicit video_url kwarg; fall back to treating
        # video_path itself as the URL.
        video_url = kwargs.get("video_url", video_path)

        # Step 1: Create media container for Reel
        container_data = {
            "media_type": "REELS",
            "video_url": video_url,
            "caption": text,
        }
        container = self._request(
            "POST", f"/{self.ig_user_id}/media", data=container_data
        )
        container_id = container.get("id", "")

        if not container_id:
            raise RuntimeError("Failed to create Instagram video container")

        # Step 2: Wait for video processing (poll status)
        # Up to 60 polls x 5 s sleep — roughly a 5-minute processing budget.
        for _ in range(60):
            status = self._request(
                "GET",
                f"/{container_id}",
                params={"fields": "status_code"},
            )
            status_code = status.get("status_code", "")
            if status_code == "FINISHED":
                break
            if status_code == "ERROR":
                raise RuntimeError("Instagram video processing failed")
            time.sleep(5)
        else:
            # for/else: runs only when the loop exhausted without break.
            raise RuntimeError("Instagram video processing timed out")

        # Step 3: Publish
        publish_data = {"creation_id": container_id}
        result = self._request(
            "POST", f"/{self.ig_user_id}/media_publish", data=publish_data
        )
        return result.get("id", "")

    def get_comments(self, post_id: str) -> list[dict]:
        """Return normalized comment dicts ('id', 'text', 'author', 'created_at')."""
        result = self._request(
            "GET",
            f"/{post_id}/comments",
            params={"fields": "id,text,username,timestamp"},
        )
        comments = []
        for item in result.get("data", []):
            comments.append({
                "id": item.get("id", ""),
                "text": item.get("text", ""),
                "author": item.get("username", "Unknown"),
                "created_at": item.get("timestamp", ""),
            })
        return comments

    def reply_to_comment(self, comment_id: str, text: str) -> bool:
        """Reply to a comment; best-effort, returns False on any API error."""
        try:
            self._request(
                "POST",
                f"/{comment_id}/replies",
                data={"message": text},
            )
            return True
        except RuntimeError:
            return False
|
||||
|
||||
|
||||
class YouTubePublisher(SocialPublisher):
    """YouTube publishing via YouTube Data API v3.

    Required API setup:
    - Google Cloud project with YouTube Data API v3 enabled
    - OAuth 2.0 credentials (access_token from OAuth flow)
    - Scopes: 'https://www.googleapis.com/auth/youtube.upload',
      'https://www.googleapis.com/auth/youtube.force-ssl'
    - Note: Uploads require OAuth, not just an API key
    """

    API_BASE = "https://www.googleapis.com"
    UPLOAD_URL = "https://www.googleapis.com/upload/youtube/v3/videos"

    def __init__(self, access_token: str):
        # OAuth 2.0 bearer token obtained from the OAuth flow.
        self.access_token = access_token

    def _headers(self) -> dict:
        # Fresh dict each call so callers may safely add headers.
        return {"Authorization": f"Bearer {self.access_token}"}

    def publish_text(self, text: str, **kwargs) -> str:
        """YouTube does not support text-only posts via Data API.

        Consider using YouTube Community Posts API if available.
        """
        raise RuntimeError(
            "YouTube does not support text-only posts via the Data API. "
            "Use publish_video() instead."
        )

    def publish_image(self, text: str, image_url: str, **kwargs) -> str:
        """YouTube does not support image-only posts via Data API."""
        raise RuntimeError(
            "YouTube does not support image posts via the Data API. "
            "Use publish_video() instead."
        )

    def publish_video(self, text: str, video_path: str, **kwargs) -> str:
        """Upload a video to YouTube using resumable upload.

        Args:
            text: Video description.
            video_path: Path to the video file.
            **kwargs: Additional options:
                - title (str): Video title (default: first 100 chars of text)
                - tags (list[str]): Video tags
                - privacy (str): 'public', 'unlisted', or 'private' (default: 'public')
                - category_id (str): YouTube category ID (default: '22' for People & Blogs)

        Returns:
            The new video's ID ('' if the API omitted it).

        Raises:
            RuntimeError: On HTTP errors, transport failures, or a missing
                resumable-upload URL.
        """
        title = kwargs.get("title", text[:100])
        tags = kwargs.get("tags", [])
        privacy = kwargs.get("privacy", "public")
        category_id = kwargs.get("category_id", "22")

        # Step 1: Initialize resumable upload
        metadata = {
            "snippet": {
                "title": title,
                "description": text,
                "tags": tags,
                "categoryId": category_id,
            },
            "status": {
                "privacyStatus": privacy,
            },
        }

        headers = self._headers()
        headers["Content-Type"] = "application/json"

        try:
            with httpx.Client(timeout=UPLOAD_TIMEOUT) as client:
                # Init resumable upload; the session URL comes back in the
                # 'Location' response header.
                init_response = client.post(
                    self.UPLOAD_URL,
                    params={
                        "uploadType": "resumable",
                        "part": "snippet,status",
                    },
                    headers=headers,
                    json=metadata,
                )
                init_response.raise_for_status()

                upload_url = init_response.headers.get("location")
                if not upload_url:
                    raise RuntimeError(
                        "YouTube API did not return a resumable upload URL"
                    )

                # Step 2: Upload the video file
                # NOTE(review): reads the whole file into memory; large files
                # could be streamed in chunks instead.
                with open(video_path, "rb") as video_file:
                    video_data = video_file.read()

                upload_headers = {
                    "Authorization": f"Bearer {self.access_token}",
                    "Content-Type": "video/*",
                    "Content-Length": str(len(video_data)),
                }

                upload_response = client.put(
                    upload_url,
                    headers=upload_headers,
                    content=video_data,
                )
                upload_response.raise_for_status()
                result = upload_response.json()
                return result.get("id", "")

        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"YouTube API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"YouTube API request failed: {e}") from e

    def get_comments(self, post_id: str) -> list[dict]:
        """Get comment threads for a video.

        Args:
            post_id: YouTube video ID.

        Returns:
            Normalized top-level comment dicts, newest first (order=time).
        """
        url = f"{self.API_BASE}/youtube/v3/commentThreads"
        params = {
            "part": "snippet",
            "videoId": post_id,
            "maxResults": 100,
            "order": "time",
        }

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                response = client.get(url, headers=self._headers(), params=params)
                response.raise_for_status()
                data = response.json()
        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"YouTube API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"YouTube API request failed: {e}") from e

        comments = []
        for item in data.get("items", []):
            # Each thread wraps its top-level comment two levels deep.
            snippet = item.get("snippet", {}).get("topLevelComment", {}).get("snippet", {})
            comments.append({
                "id": item.get("snippet", {}).get("topLevelComment", {}).get("id", ""),
                "text": snippet.get("textDisplay", ""),
                "author": snippet.get("authorDisplayName", "Unknown"),
                "created_at": snippet.get("publishedAt", ""),
            })
        return comments

    def reply_to_comment(self, comment_id: str, text: str) -> bool:
        """Reply to a YouTube comment.

        Args:
            comment_id: The parent comment ID to reply to.
            text: Reply text.

        Returns:
            True on success, False on any HTTP/transport error (best-effort).
        """
        url = f"{self.API_BASE}/youtube/v3/comments"
        params = {"part": "snippet"}
        payload = {
            "snippet": {
                "parentId": comment_id,
                "textOriginal": text,
            }
        }

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                response = client.post(
                    url, headers=self._headers(), params=params, json=payload
                )
                response.raise_for_status()
                return True
        except (httpx.HTTPStatusError, httpx.RequestError):
            return False
|
||||
|
||||
|
||||
class TikTokPublisher(SocialPublisher):
    """TikTok publishing via Content Posting API.

    Required API setup:
    - Register app at https://developers.tiktok.com
    - Apply for 'Content Posting API' access
    - OAuth 2.0 flow to get access_token
    - Scopes: 'video.publish', 'video.upload'
    - Note: TikTok API access requires app review and approval
    - Text-only and image posts are not supported via API
    """

    API_BASE = "https://open.tiktokapis.com/v2"

    def __init__(self, access_token: str):
        # OAuth 2.0 bearer token from the TikTok OAuth flow.
        self.access_token = access_token

    def _headers(self) -> dict:
        # Fresh dict each call; JSON content type for all API requests.
        return {
            "Authorization": f"Bearer {self.access_token}",
            "Content-Type": "application/json",
        }

    def publish_text(self, text: str, **kwargs) -> str:
        """TikTok does not support text-only posts via API."""
        raise RuntimeError(
            "TikTok does not support text-only posts via the Content Posting API. "
            "Use publish_video() instead."
        )

    def publish_image(self, text: str, image_url: str, **kwargs) -> str:
        """TikTok image posting is limited. Use publish_video instead."""
        raise RuntimeError(
            "TikTok image posting is not widely supported via the API. "
            "Use publish_video() instead."
        )

    def publish_video(self, text: str, video_path: str, **kwargs) -> str:
        """Publish a video to TikTok using the Content Posting API.

        Args:
            text: Video caption/description.
            video_path: Path to the video file.
            **kwargs: Additional options:
                - privacy_level (str): 'PUBLIC_TO_EVERYONE', 'MUTUAL_FOLLOW_FRIENDS',
                  'FOLLOWER_OF_CREATOR', 'SELF_ONLY' (default: 'PUBLIC_TO_EVERYONE')
                - disable_comment (bool): Disable comments (default: False)
                - disable_duet (bool): Disable duet (default: False)
                - disable_stitch (bool): Disable stitch (default: False)

        Returns:
            The publish_id returned by the init step ('' if absent).

        Raises:
            RuntimeError: On HTTP errors, transport failures, or a missing
                upload URL.
        """
        privacy_level = kwargs.get("privacy_level", "PUBLIC_TO_EVERYONE")
        disable_comment = kwargs.get("disable_comment", False)
        disable_duet = kwargs.get("disable_duet", False)
        disable_stitch = kwargs.get("disable_stitch", False)

        # Get file size for chunk upload
        # (local import; the rest of the module does not need os)
        import os

        file_size = os.path.getsize(video_path)

        # Step 1: Initialize video upload
        init_url = f"{self.API_BASE}/post/publish/video/init/"
        init_payload = {
            "post_info": {
                "title": text[:150],  # TikTok title limit
                "privacy_level": privacy_level,
                "disable_comment": disable_comment,
                "disable_duet": disable_duet,
                "disable_stitch": disable_stitch,
            },
            "source_info": {
                "source": "FILE_UPLOAD",
                "video_size": file_size,
                "chunk_size": file_size,  # Single chunk upload
                "total_chunk_count": 1,
            },
        }

        try:
            with httpx.Client(timeout=UPLOAD_TIMEOUT) as client:
                init_response = client.post(
                    init_url, headers=self._headers(), json=init_payload
                )
                init_response.raise_for_status()
                init_data = init_response.json()

                publish_id = init_data.get("data", {}).get("publish_id", "")
                upload_url = init_data.get("data", {}).get("upload_url", "")

                if not upload_url:
                    raise RuntimeError(
                        f"TikTok API did not return upload URL: {init_data}"
                    )

                # Step 2: Upload the video file
                # NOTE(review): reads the whole file into memory (single-chunk
                # upload); large files could be streamed/chunked instead.
                with open(video_path, "rb") as video_file:
                    video_data = video_file.read()

                upload_headers = {
                    "Content-Type": "video/mp4",
                    "Content-Range": f"bytes 0-{file_size - 1}/{file_size}",
                }

                upload_response = client.put(
                    upload_url,
                    headers=upload_headers,
                    content=video_data,
                )
                upload_response.raise_for_status()

                return publish_id

        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"TikTok API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"TikTok API request failed: {e}") from e

    def get_comments(self, post_id: str) -> list[dict]:
        """Get comments on a TikTok video.

        Note: Requires 'video.list' scope and comment read access.
        """
        url = f"{self.API_BASE}/comment/list/"
        payload = {
            "video_id": post_id,
            "max_count": 50,
        }

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                response = client.post(url, headers=self._headers(), json=payload)
                response.raise_for_status()
                data = response.json()
        except httpx.HTTPStatusError as e:
            raise RuntimeError(
                f"TikTok API error {e.response.status_code}: {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            raise RuntimeError(f"TikTok API request failed: {e}") from e

        comments = []
        for item in data.get("data", {}).get("comments", []):
            comments.append({
                "id": item.get("id", ""),
                "text": item.get("text", ""),
                "author": item.get("user", {}).get("display_name", "Unknown"),
                # create_time is numeric in the API; normalized to str here.
                "created_at": str(item.get("create_time", "")),
            })
        return comments

    def reply_to_comment(self, comment_id: str, text: str) -> bool:
        """Reply to a TikTok comment.

        Note: Comment reply functionality may be limited depending on API access level.
        """
        url = f"{self.API_BASE}/comment/reply/"
        payload = {
            "comment_id": comment_id,
            "text": text,
        }

        try:
            with httpx.Client(timeout=TIMEOUT) as client:
                response = client.post(url, headers=self._headers(), json=payload)
                response.raise_for_status()
                return True
        except (httpx.HTTPStatusError, httpx.RequestError):
            return False
|
||||
|
||||
|
||||
def get_publisher(
    platform: str, access_token: str, **kwargs
) -> SocialPublisher:
    """Factory function to get a social media publisher instance.

    Args:
        platform: One of 'facebook', 'instagram', 'youtube', 'tiktok'
            (case-insensitive).
        access_token: OAuth access token for the platform.
        **kwargs: Additional platform-specific arguments:
            - facebook: page_id (str) - required
            - instagram: ig_user_id (str) - required

    Returns:
        A SocialPublisher instance for the requested platform.

    Raises:
        ValueError: If platform is not supported or required kwargs are missing.
    """
    name = platform.lower()

    # Guard-clause dispatch: each branch returns immediately, validating the
    # platform-specific required kwarg before constructing the publisher.
    if name == "facebook":
        page_id = kwargs.get("page_id")
        if not page_id:
            raise ValueError("FacebookPublisher requires 'page_id' parameter")
        return FacebookPublisher(access_token=access_token, page_id=page_id)

    if name == "instagram":
        ig_user_id = kwargs.get("ig_user_id")
        if not ig_user_id:
            raise ValueError("InstagramPublisher requires 'ig_user_id' parameter")
        return InstagramPublisher(access_token=access_token, ig_user_id=ig_user_id)

    if name == "youtube":
        return YouTubePublisher(access_token=access_token)

    if name == "tiktok":
        return TikTokPublisher(access_token=access_token)

    supported = "facebook, instagram, youtube, tiktok"
    raise ValueError(
        f"Unknown platform '{platform}'. Supported: {supported}"
    )
|
||||
Reference in New Issue
Block a user