Spaces:
Running
Running
Cleo
committed on
Commit
·
c2d7928
1
Parent(s):
fce43bd
reorganize
Browse files- agent/agent.py → agent.py +9 -9
- agent/__init__.py +0 -1
- app.py +2 -2
- agent/prompts.py → prompts.py +0 -0
- agent/utils.py → utils.py +0 -0
agent/agent.py → agent.py
RENAMED
|
@@ -132,8 +132,8 @@ def call_llm(messages: List[Dict[str, Any]], temperature: float = 0.7, model: st
|
|
| 132 |
|
| 133 |
def generate_initial_vibe(state: AgentState) -> AgentState:
|
| 134 |
"""Node: Generate initial vibe description from uploaded images using VLM"""
|
| 135 |
-
from
|
| 136 |
-
from
|
| 137 |
|
| 138 |
client = create_openai_client()
|
| 139 |
|
|
@@ -223,8 +223,8 @@ def generate_initial_vibe(state: AgentState) -> AgentState:
|
|
| 223 |
|
| 224 |
def refine_vibe(state: AgentState) -> AgentState:
|
| 225 |
"""Node: Refine vibe based on user feedback - only refines feels_like portion"""
|
| 226 |
-
from
|
| 227 |
-
from
|
| 228 |
|
| 229 |
print("[DEBUG AGENT] refine_vibe node started")
|
| 230 |
|
|
@@ -278,7 +278,7 @@ def refine_vibe(state: AgentState) -> AgentState:
|
|
| 278 |
|
| 279 |
def check_vibe_satisfaction(state: AgentState) -> Literal["refine", "retrieve"]:
|
| 280 |
"""Conditional edge: Check if user is satisfied with vibe description"""
|
| 281 |
-
from
|
| 282 |
|
| 283 |
# Get the last user message
|
| 284 |
user_messages = [m for m in state["messages"] if m.get("role") == "user"]
|
|
@@ -567,7 +567,7 @@ def fetch_book_metadata(state: AgentState) -> AgentState:
|
|
| 567 |
|
| 568 |
def _generate_narrowing_question(state: AgentState, question_num: int) -> tuple:
|
| 569 |
"""Helper: Generate a narrowing question"""
|
| 570 |
-
from
|
| 571 |
|
| 572 |
books_summary_parts = []
|
| 573 |
for i, b in enumerate(state["books_with_metadata"], 1):
|
|
@@ -669,7 +669,7 @@ def finalize_books(state: AgentState) -> AgentState:
|
|
| 669 |
"""Node: Use reasoning to select final 3 books based on vibe and preferences"""
|
| 670 |
print(f"[DEBUG AGENT] finalize_books node started")
|
| 671 |
print(f"[DEBUG AGENT] books_with_metadata count: {len(state.get('books_with_metadata', []))}")
|
| 672 |
-
from
|
| 673 |
|
| 674 |
# Build detailed book summary with full descriptions - no truncation
|
| 675 |
books_summary_parts = []
|
|
@@ -693,7 +693,7 @@ def finalize_books(state: AgentState) -> AgentState:
|
|
| 693 |
|
| 694 |
# Use reasoning model for book selection - this is a complex decision
|
| 695 |
# Increase max_tokens since we're sending full book descriptions
|
| 696 |
-
selection_response, reasoning = call_llm(messages, temperature=0.3, model=REASONING_MODEL, include_reasoning=True, max_tokens=
|
| 697 |
|
| 698 |
# Log reasoning even if empty
|
| 699 |
state["reasoning"].append(f"🧠 REASONING (Book Selection):\n{reasoning or 'No reasoning provided'}")
|
|
@@ -755,7 +755,7 @@ def generate_soundtrack(state: AgentState) -> AgentState:
|
|
| 755 |
print(f"[DEBUG AGENT] vibe_context built: {list(vibe_context.keys())}")
|
| 756 |
|
| 757 |
# Use LLM to generate music prompt from vibe context
|
| 758 |
-
from
|
| 759 |
|
| 760 |
messages = [
|
| 761 |
{"role": "system", "content": MUSIC_PROMPT_GENERATION},
|
|
|
|
| 132 |
|
| 133 |
def generate_initial_vibe(state: AgentState) -> AgentState:
|
| 134 |
"""Node: Generate initial vibe description from uploaded images using VLM"""
|
| 135 |
+
from prompts import VIBE_EXTRACTION
|
| 136 |
+
from utils import parse_json_response, extract_vibe_components
|
| 137 |
|
| 138 |
client = create_openai_client()
|
| 139 |
|
|
|
|
| 223 |
|
| 224 |
def refine_vibe(state: AgentState) -> AgentState:
|
| 225 |
"""Node: Refine vibe based on user feedback - only refines feels_like portion"""
|
| 226 |
+
from prompts import VIBE_REFINEMENT
|
| 227 |
+
from utils import strip_thinking_tags
|
| 228 |
|
| 229 |
print("[DEBUG AGENT] refine_vibe node started")
|
| 230 |
|
|
|
|
| 278 |
|
| 279 |
def check_vibe_satisfaction(state: AgentState) -> Literal["refine", "retrieve"]:
|
| 280 |
"""Conditional edge: Check if user is satisfied with vibe description"""
|
| 281 |
+
from prompts import VIBE_SATISFACTION_CHECKER
|
| 282 |
|
| 283 |
# Get the last user message
|
| 284 |
user_messages = [m for m in state["messages"] if m.get("role") == "user"]
|
|
|
|
| 567 |
|
| 568 |
def _generate_narrowing_question(state: AgentState, question_num: int) -> tuple:
|
| 569 |
"""Helper: Generate a narrowing question"""
|
| 570 |
+
from prompts import NARROWING_QUESTION_GENERATOR
|
| 571 |
|
| 572 |
books_summary_parts = []
|
| 573 |
for i, b in enumerate(state["books_with_metadata"], 1):
|
|
|
|
| 669 |
"""Node: Use reasoning to select final 3 books based on vibe and preferences"""
|
| 670 |
print(f"[DEBUG AGENT] finalize_books node started")
|
| 671 |
print(f"[DEBUG AGENT] books_with_metadata count: {len(state.get('books_with_metadata', []))}")
|
| 672 |
+
from prompts import get_book_finalizer_prompt
|
| 673 |
|
| 674 |
# Build detailed book summary with full descriptions - no truncation
|
| 675 |
books_summary_parts = []
|
|
|
|
| 693 |
|
| 694 |
# Use reasoning model for book selection - this is a complex decision
|
| 695 |
# Increase max_tokens since we're sending full book descriptions
|
| 696 |
+
selection_response, reasoning = call_llm(messages, temperature=0.3, model=REASONING_MODEL, include_reasoning=True, max_tokens=5000)
|
| 697 |
|
| 698 |
# Log reasoning even if empty
|
| 699 |
state["reasoning"].append(f"🧠 REASONING (Book Selection):\n{reasoning or 'No reasoning provided'}")
|
|
|
|
| 755 |
print(f"[DEBUG AGENT] vibe_context built: {list(vibe_context.keys())}")
|
| 756 |
|
| 757 |
# Use LLM to generate music prompt from vibe context
|
| 758 |
+
from prompts import MUSIC_PROMPT_GENERATION
|
| 759 |
|
| 760 |
messages = [
|
| 761 |
{"role": "system", "content": MUSIC_PROMPT_GENERATION},
|
agent/__init__.py
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
"""Agent package for Vibe Reader"""
|
|
|
|
|
|
app.py
CHANGED
|
@@ -11,7 +11,7 @@ import logging
|
|
| 11 |
warnings.filterwarnings("ignore", message=".*Invalid file descriptor.*")
|
| 12 |
logging.getLogger("asyncio").setLevel(logging.CRITICAL)
|
| 13 |
|
| 14 |
-
from agent
|
| 15 |
|
| 16 |
|
| 17 |
|
|
@@ -306,4 +306,4 @@ with gr.Blocks() as demo:
|
|
| 306 |
|
| 307 |
if __name__ == "__main__":
|
| 308 |
# Note: css_paths removed as custom.css location may vary
|
| 309 |
-
demo.queue().launch(theme=gr.themes.Monochrome(), share=True, ssr_mode=False)
|
|
|
|
| 11 |
warnings.filterwarnings("ignore", message=".*Invalid file descriptor.*")
|
| 12 |
logging.getLogger("asyncio").setLevel(logging.CRITICAL)
|
| 13 |
|
| 14 |
+
from agent import run_agent
|
| 15 |
|
| 16 |
|
| 17 |
|
|
|
|
| 306 |
|
| 307 |
if __name__ == "__main__":
|
| 308 |
# Note: css_paths removed as custom.css location may vary
|
| 309 |
+
demo.queue().launch(theme=gr.themes.Monochrome(), css_paths='assets/custom.css', share=True, ssr_mode=False)
|
agent/prompts.py → prompts.py
RENAMED
|
File without changes
|
agent/utils.py → utils.py
RENAMED
|
File without changes
|