fix: pass editorial brief to LLM prompt + improve missing API key error

- Add 'brief' field to GenerateContentRequest schema
- Pass brief from router to generate_post_text service
- Inject brief as mandatory instructions in LLM prompt with highest priority
- Return structured error when LLM provider/API key not configured
- Show dedicated warning banner with link to Settings when API key missing

Fixes: content ignoring editorial brief, unhelpful API key error messages

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -78,9 +78,21 @@ def generate_content(
|
||||
model = request.model or _get_setting(db, "llm_model", current_user.id)
|
||||
|
||||
if not provider_name:
|
||||
raise HTTPException(status_code=400, detail="LLM provider not configured. Set 'llm_provider' in settings.")
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={
|
||||
"message": "Provider AI non configurato. Vai in Impostazioni → Provider AI per scegliere il provider e inserire la API key.",
|
||||
"missing_settings": True,
|
||||
},
|
||||
)
|
||||
if not api_key:
|
||||
raise HTTPException(status_code=400, detail="LLM API key not configured. Set 'llm_api_key' in settings.")
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={
|
||||
"message": "API key non configurata. Vai in Impostazioni → Provider AI per inserire la tua API key.",
|
||||
"missing_settings": True,
|
||||
},
|
||||
)
|
||||
|
||||
# Build character dict for content service
|
||||
char_dict = {
|
||||
@@ -98,6 +110,7 @@ def generate_content(
|
||||
llm_provider=llm,
|
||||
platform=request.effective_platform,
|
||||
topic_hint=request.topic_hint,
|
||||
brief=request.brief,
|
||||
)
|
||||
|
||||
# Generate hashtags
|
||||
|
||||
@@ -108,6 +108,7 @@ class GenerateContentRequest(BaseModel):
|
||||
platforms: List[str] = [] # new: multi-platform (overrides platform if non-empty)
|
||||
content_types: List[str] = [] # new: multi-type (overrides content_type if non-empty)
|
||||
topic_hint: Optional[str] = None
|
||||
brief: Optional[str] = None # editorial brief: technique + instructions for the LLM
|
||||
include_affiliates: bool = True
|
||||
provider: Optional[str] = None
|
||||
model: Optional[str] = None
|
||||
|
||||
@@ -15,6 +15,7 @@ def generate_post_text(
|
||||
llm_provider: LLMProvider,
|
||||
platform: str,
|
||||
topic_hint: str | None = None,
|
||||
brief: str | None = None,
|
||||
) -> str:
|
||||
"""Generate social media post text based on a character profile.
|
||||
|
||||
@@ -23,6 +24,7 @@ def generate_post_text(
|
||||
topic_hint: Optional topic suggestion to guide generation.
|
||||
llm_provider: LLM provider instance for text generation.
|
||||
platform: Target platform (e.g. 'instagram', 'facebook', 'tiktok', 'youtube').
|
||||
brief: Optional editorial brief with narrative technique and detailed instructions.
|
||||
|
||||
Returns:
|
||||
Generated post text as a string.
|
||||
@@ -78,8 +80,17 @@ def generate_post_text(
|
||||
if topic_hint:
|
||||
topic_instruction = f" The post should be about: {topic_hint}."
|
||||
|
||||
# Brief is the highest-priority instruction — it overrides defaults
|
||||
brief_instruction = ""
|
||||
if brief:
|
||||
brief_instruction = (
|
||||
f"\n\nISTRUZIONI OBBLIGATORIE DAL BRIEF EDITORIALE:\n{brief}\n"
|
||||
f"Rispetta TUTTE le istruzioni del brief. "
|
||||
f"Il brief ha priorità su qualsiasi altra indicazione."
|
||||
)
|
||||
|
||||
prompt = (
|
||||
f"{guidance}{topic_instruction}\n\n"
|
||||
f"{guidance}{topic_instruction}{brief_instruction}\n\n"
|
||||
f"Write the post now. Output ONLY the post text, nothing else."
|
||||
)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user