gradio_3 (#4)
- gradio-4 (c951f406c1fd164c9ff1ec899ed5227b730fb3b7)
- ComputeAgent/main.py +1 -23
- Gradio_interface.py +29 -29
- app.py +4 -1
ComputeAgent/main.py
CHANGED
@@ -259,26 +259,4 @@ async def log_requests(request: Request, call_next):
         return response
     except Exception as e:
         logger.error(f"❌ {request.method} {request.url.path} - Error: {e}")
-        raise
-
-
-if __name__ == "__main__":
-    """
-    Run the FastAPI application with uvicorn.
-
-    Configuration:
-    - Host: 0.0.0.0 (accessible from network)
-    - Port: 8000
-    - Reload: Enabled for development
-    - Log level: info
-    """
-    logger.info("🎬 Starting uvicorn server...")
-
-    uvicorn.run(
-        "main:app",
-        host="0.0.0.0",
-        port=8000,
-        reload=False,
-        log_level="info",
-        access_log=True
-    )
+        raise
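With the `if __name__ == "__main__"` entry point removed, ComputeAgent/main.py no longer starts its own uvicorn server; its FastAPI `app` is now only imported (see app.py below). A minimal sketch of how the Space could serve both the API and the Gradio UI from one process — the `gr.mount_gradio_app` call and port 7860 (the usual Hugging Face Spaces port) are assumptions, not something shown in this commit:

```python
# Sketch only: one way app.py can serve both apps now that main.py no longer
# calls uvicorn.run() itself. Mount path "/" and port 7860 are assumptions.
import uvicorn
import gradio as gr

from ComputeAgent.main import app as fastapi_app   # FastAPI backend (this commit)
from Gradio_interface import create_gradio_demo    # Gradio UI factory (this commit)

# Build the Gradio Blocks UI; it defaults to api_base_url="http://localhost:8000".
demo = create_gradio_demo()

# Mount the Gradio app onto the FastAPI app so one server handles both.
app = gr.mount_gradio_app(fastapi_app, demo, path="/")

if __name__ == "__main__":
    # A single uvicorn process serves the API routes and the Gradio UI.
    uvicorn.run(app, host="0.0.0.0", port=7860, log_level="info")
```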
Gradio_interface.py
CHANGED
@@ -1085,39 +1085,39 @@ def create_gradio_demo(api_base_url: str = "http://localhost:8000"):
         computeagent_logo_html = f'<img src="data:image/png;base64,{COMPUTEAGENT_LOGO_BASE64}" alt="ComputeAgent Logo" style="height: 60px; width: auto; object-fit: contain; margin-right: 15px;">' if COMPUTEAGENT_LOGO_BASE64 else ''

         gr.HTML(f"""
-        [3 removed lines not shown]
+        <div class="header-box" style="display: flex; justify-content: space-between; align-items: center;">
+            <div style="display: flex; align-items: center;">
+                {computeagent_logo_html}
+                <div>
+                    <h1 style="margin: 0; font-size: 2.5em;">ComputeAgent</h1>
+                    <p style="margin: 10px 0 0 0; opacity: 0.9;">
+                        Hivenet AI-Powered Deployment using MCP of Compute by Hivenet
+                    </p>
+                </div>
+            </div>
             <div>
-        [1 removed line not shown]
-                <p style="margin: 10px 0 0 0; opacity: 0.9;">
-                    Hivenet AI-Powered Deployment using MCP of Compute by Hivenet
-                </p>
+                {hivenet_logo_html}
             </div>
         </div>
-        [10 removed lines not shown]
-                label="Agent Conversation",
-                height=900,
-                show_copy_button=True,
-                elem_classes=["chatbot"]
-            )
-        [1 removed line not shown]
-            with gr.Row():
-                msg = gr.Textbox(
-                    placeholder="Deploy meta-llama/Llama-3.1-70B or ask: What are the latest AI developments?",
-                    scale=5,
-                    show_label=False
+        """)
+
+        with gr.Row():
+            with gr.Column(scale=11):
+                # Chat interface
+                chatbot = gr.Chatbot(
+                    label="Agent Conversation",
+                    height=900,
+                    show_copy_button=True,
+                    elem_classes=["chatbot"]
                 )
-        [1 removed line not shown]
+
+                with gr.Row():
+                    msg = gr.Textbox(
+                        placeholder="Deploy meta-llama/Llama-3.1-70B or ask: What are the latest AI developments?",
+                        scale=5,
+                        show_label=False
+                    )
+                    send_btn = gr.Button("🚀 Send", variant="primary", scale=1)

             # ================================================================
             # CAPACITY APPROVAL PANEL
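The hunk adds the header markup, a `gr.Column(scale=11)` wrapper around the chat widgets, and a Send button, but not the event wiring. A hedged sketch of how such a layout is typically connected — the `respond` callback and its signature here are hypothetical stand-ins, not the handler used in Gradio_interface.py:

```python
# Sketch only: typical wiring for the widgets added above.
import gradio as gr

def respond(message, history):
    # Placeholder handler; the real one would call the ComputeAgent API
    # (api_base_url) and append the agent's reply to the chat history.
    history = history + [(message, f"(agent reply to: {message})")]
    return "", history

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Agent Conversation", height=900, show_copy_button=True)
    with gr.Row():
        msg = gr.Textbox(
            placeholder="Deploy meta-llama/Llama-3.1-70B or ask: What are the latest AI developments?",
            scale=5,
            show_label=False,
        )
        send_btn = gr.Button("🚀 Send", variant="primary", scale=1)

    # Pressing Enter in the textbox or clicking Send both submit the message.
    msg.submit(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])
    send_btn.click(respond, inputs=[msg, chatbot], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()
```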
app.py
CHANGED
@@ -9,6 +9,10 @@ import logging
 import uvicorn
 import gradio as gr

+# Add current directory to path
+current_dir = os.path.dirname(os.path.abspath(__file__))
+sys.path.insert(0, current_dir)
+
 # Setup logging
 logging.basicConfig(
     level=logging.INFO,
@@ -17,7 +21,6 @@ logging.basicConfig(
 logger = logging.getLogger("ComputeAgent-HF")

 # Import the FastAPI app from ComputeAgent
-sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'ComputeAgent'))
 from ComputeAgent.main import app as fastapi_app

 # Import the Gradio interface
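Previously app.py put the ComputeAgent directory itself on sys.path while also importing `ComputeAgent.main` as a package; adding the Space root instead makes the package-style import unambiguous. A small sketch of the layout this assumes — the paths are illustrative and the `__init__.py` in ComputeAgent/ is an assumption, not shown in this commit:

```python
# Illustrative layout assumed by this change (example paths only):
#
#   <space root>/                 <- current_dir, now first on sys.path
#   ├── app.py
#   ├── Gradio_interface.py
#   └── ComputeAgent/
#       ├── __init__.py           <- assumed, so ComputeAgent imports as a package
#       └── main.py               <- defines the FastAPI `app`
import os
import sys

current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, current_dir)

# With the Space root on sys.path, the package import used in app.py resolves:
from ComputeAgent.main import app as fastapi_app  # noqa: E402
```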