Update app.py
app.py CHANGED
@@ -49,6 +49,21 @@ async def call_model(prompt: str, model: str = "gpt-4o", api_key: str = None, ma
             raise
     raise Exception(f"Failed to get response after {max_retries} attempts.")
 
+# -------------------- Conversation History Conversion --------------------
+def convert_history(history: List[Dict[str, str]]) -> List[Dict[str, str]]:
+    """
+    Convert our internal conversation history format (with 'agent' and 'message' keys)
+    into the Gradio messages format (with 'role' and 'content').
+    """
+    converted = []
+    for entry in history:
+        if entry["agent"].lower() == "user":
+            converted.append({"role": "user", "content": entry["message"]})
+        else:
+            # You can customize the formatting below as desired.
+            converted.append({"role": "assistant", "content": f'{entry["agent"]}: {entry["message"]}'})
+    return converted
+
 # -------------------- Shared Context --------------------
 class Context:
     def __init__(self, original_task: str, optimized_task: Optional[str] = None,
@@ -64,7 +79,8 @@ class Context:
         self.test_cases = test_cases
         self.test_results = test_results
         self.documentation = documentation
-
+        # Initialize conversation history with the user's original task.
+        self.conversation_history = conversation_history or [{"agent": "User", "message": original_task}]
 
     def add_conversation_entry(self, agent_name: str, message: str):
         self.conversation_history.append({"agent": agent_name, "message": message})
@@ -292,7 +308,8 @@ def process_conversation_generator(task_message: str, api_key: str,
         try:
             msg = log_queue.get(timeout=0.1)
             if isinstance(msg, tuple) and msg[0] == "result":
-
+                # Convert conversation history to the required messages format.
+                yield gr.Chatbot.update(value=convert_history(msg[1]))
             else:
                 yield msg
         except queue.Empty:
@@ -320,7 +337,7 @@ with gr.Blocks() as demo:
     gr.Markdown("## Multi-Agent Task Solver with Human-in-the-Loop")
 
     with gr.Row():
-        # Set type="messages" to
+        # Set type="messages" to match the expected format.
         chat_output = gr.Chatbot(label="Conversation", type="messages")
 
     with gr.Row():
@@ -331,7 +348,7 @@ with gr.Blocks() as demo:
 
     send_button = gr.Button("Send")
 
-    #
+    # Note: 'stream=True' has been removed.
    send_button.click(fn=multi_agent_chat, inputs=[message_input, api_key_input], outputs=chat_output)
 
 if __name__ == "__main__":
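For context, a minimal sketch (not part of the commit) of what the new convert_history helper produces. The helper logic is copied from the diff above; the sample history is hypothetical.

from typing import Dict, List

def convert_history(history: List[Dict[str, str]]) -> List[Dict[str, str]]:
    # Same mapping as the helper added in this commit: internal {'agent', 'message'}
    # entries become Gradio messages-format {'role', 'content'} dicts.
    converted = []
    for entry in history:
        if entry["agent"].lower() == "user":
            converted.append({"role": "user", "content": entry["message"]})
        else:
            converted.append({"role": "assistant", "content": f'{entry["agent"]}: {entry["message"]}'})
    return converted

history = [
    {"agent": "User", "message": "Sort a list of numbers."},
    {"agent": "Coder", "message": "def sort_numbers(xs): return sorted(xs)"},
]
print(convert_history(history))
# [{'role': 'user', 'content': 'Sort a list of numbers.'},
#  {'role': 'assistant', 'content': 'Coder: def sort_numbers(xs): return sorted(xs)'}]

And a minimal wiring sketch for the UI side, assuming a recent Gradio release that supports Chatbot type="messages". The component and handler names mirror the diff, but the yielding multi_agent_chat body and the Textbox labels here are hypothetical stand-ins; Gradio streams whatever a generator function yields, which is why the click() call needs no stream argument.

import gradio as gr

def multi_agent_chat(message, api_key):
    # Hypothetical stand-in: yield successive messages-format histories as agents respond.
    history = [{"role": "user", "content": message}]
    yield history
    history = history + [{"role": "assistant", "content": "Optimizer: refining the task..."}]
    yield history

with gr.Blocks() as demo:
    gr.Markdown("## Multi-Agent Task Solver with Human-in-the-Loop")
    with gr.Row():
        chat_output = gr.Chatbot(label="Conversation", type="messages")
    with gr.Row():
        message_input = gr.Textbox(label="Task")
        api_key_input = gr.Textbox(label="API key", type="password")
        send_button = gr.Button("Send")
    # No stream argument: streaming comes from the generator itself.
    send_button.click(fn=multi_agent_chat, inputs=[message_input, api_key_input], outputs=chat_output)

if __name__ == "__main__":
    demo.launch()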