feat: mobile UX fixes + Phase C one-click generation

Mobile UX:
- index.css: comprehensive mobile media queries — headings scale down,
  touch targets enforced, grid-2col-mobile collapse class, tablet breakpoint
- ContentArchive/ContentPage: grid minmax uses min(100%, Npx) to prevent
  overflow on small screens
- CharacterForm: visual style + rules editor grids collapse on mobile
- Dashboard: stat cards grid mobile-safe
- Layout: better nav touch targets, footer responsive gap

Phase C — One-Click Generation:
- Backend: GET /api/content/suggestions endpoint — LLM generates 3 topic
  ideas based on character profile and avoids repeating recent posts
- Dashboard: "Suggerimenti per oggi" section loads suggestions on mount,
  each card links to /content with prefilled topic + character
- ContentPage: reads ?topic= and ?character= URL params, auto-fills form
  and auto-triggers generation (one-click flow from Dashboard)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Michele
2026-04-05 01:28:25 +02:00
parent 16c7c4404c
commit 67bc0d2980
7 changed files with 186 additions and 25 deletions

View File

@@ -340,3 +340,78 @@ def approve_post(
db.commit()
db.refresh(post)
return post
@router.get("/suggestions")
def get_topic_suggestions(
    character_id: int | None = Query(None),
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user),
):
    """Suggest up to 3 content topic ideas for a character.

    Asks the user's configured LLM provider for three short topic ideas
    based on the character's niche, usual topics and target audience,
    steering away from the character's 5 most recent posts.

    Query params:
        character_id: optional explicit character id (must belong to the
            current user); when omitted, falls back to the user's active
            character.

    Returns a dict with:
        suggestions: list[str] (possibly empty on failure),
        character_id / character_name: the resolved character (or
            character_id=None when no character was found),
        needs_setup: True when the LLM provider is not configured.
    """
    # Resolve the character: explicit id scoped to the owner, or the
    # user's active character as a fallback.
    if character_id:
        character = (
            db.query(Character)
            .filter(Character.id == character_id, Character.user_id == current_user.id)
            .first()
        )
    else:
        character = (
            db.query(Character)
            .filter(Character.user_id == current_user.id, Character.is_active.is_(True))
            .first()
        )
    if not character:
        return {"suggestions": [], "character_id": None}

    # LLM configuration is per-user; without a provider and key we cannot
    # generate, so signal the frontend to show the setup flow instead.
    provider_name = _get_setting(db, "llm_provider", current_user.id)
    api_key = _get_setting(db, "llm_api_key", current_user.id)
    model = _get_setting(db, "llm_model", current_user.id)
    if not provider_name or not api_key:
        return {"suggestions": [], "character_id": character.id, "needs_setup": True}

    # Build a short digest of the 5 most recent posts so the prompt can
    # ask the model to avoid repeating them.
    recent_posts = (
        db.query(Post)
        .filter(Post.character_id == character.id)
        .order_by(Post.created_at.desc())
        .limit(5)
        .all()
    )
    recent_topics = [p.text_content[:100] for p in recent_posts if p.text_content]
    recent_str = (
        "\n".join(f"- {t}" for t in recent_topics)
        if recent_topics
        else "Nessun post recente."
    )

    base_url = _get_setting(db, "llm_base_url", current_user.id)
    llm = get_llm_provider(provider_name, api_key, model, base_url=base_url)

    topics = character.topics or []
    niche = character.niche or "general"
    target = character.target_audience or ""
    # Prompts are intentionally in Italian — the product's generated
    # content and UI are Italian-language.
    system_prompt = (
        "Sei un social media strategist esperto. "
        "Suggerisci 3 idee per post social, ciascuna su una riga. "
        "Ogni idea deve essere una frase breve (max 15 parole) che descrive il topic. "
        "Non numerare, non aggiungere spiegazioni. Solo 3 righe, una per idea."
    )
    prompt = (
        f"Personaggio: {character.name}, nicchia: {niche}\n"
        f"Topic abituali: {', '.join(topics) if topics else 'generici'}\n"
        f"Target: {target}\n"
        f"Post recenti (evita ripetizioni):\n{recent_str}\n\n"
        f"Suggerisci 3 idee per post nuovi e diversi dai recenti:"
    )
    try:
        result = llm.generate(prompt, system=system_prompt)
        # The model is instructed to emit one idea per line; keep at most
        # the first 3 non-empty lines.
        lines = [line.strip() for line in result.strip().splitlines() if line.strip()]
        suggestions = lines[:3]
    except Exception:
        # Suggestions are a best-effort nicety: log the failure for
        # debugging but degrade to an empty list rather than a 500.
        import logging  # local import: only needed on this rare error path

        logging.getLogger(__name__).exception("Topic suggestion generation failed")
        suggestions = []
    return {
        "suggestions": suggestions,
        "character_id": character.id,
        "character_name": character.name,
    }