S-Dreamer committed on
Commit
a1cde8b
·
verified ·
1 Parent(s): 4da1c72

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -127
app.py CHANGED
@@ -81,131 +81,6 @@ def normalize_result(res: Dict[str, Any]):
81
  # ANALYST COPILOT (LLM)
82
  # ---------------------------------------------------------------------
83
 
84
def respond(
    message,
    history,
    system_prompt,
    model_name,
    hf_token,
    temperature,
    top_p,
    max_tokens,
):
    """Stream a chat completion from the selected Hub model.

    Args:
        message: Latest user message (str).
        history: Prior turns in OpenAI ``messages`` format (list of
            ``{"role", "content"}`` dicts) — the format produced by
            ``gr.ChatInterface(type="messages")``.
        system_prompt: System instruction prepended to the context.
        model_name: Hub model id handed to ``InferenceClient``.
        hf_token: ``gr.OAuthToken`` instance, or None when the user has
            not logged in; its ``.token`` attribute authenticates the call.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling cutoff.
        max_tokens: Generation cap.

    Yields:
        The accumulated response text after each streamed delta, so the
        chat UI re-renders the growing message in place.
    """
    # Fix: the original read `hf_token.token` unconditionally, which
    # raises AttributeError when no OAuth login has happened yet
    # (the component then supplies None). Fall back to anonymous access.
    token = getattr(hf_token, "token", None)
    client = InferenceClient(model=model_name, token=token)

    msgs = [{"role": "system", "content": system_prompt}]
    msgs.extend(history)
    msgs.append({"role": "user", "content": message})

    buf = ""
    for chunk in client.chat_completion(
        messages=msgs,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True,
    ):
        delta = chunk.choices[0].delta.content
        # Some stream chunks carry an empty/None delta (e.g. role-only
        # frames); skip them so we never yield a stale buffer twice.
        if delta:
            buf += delta
            yield buf
113
-
114
def inject_osint(history, osint_obj):
    """Append the raw OSINT payload to the chat history as a system turn.

    The object is pretty-printed as JSON inside a fenced code block so
    the model can read it verbatim on its next turn. ``default=str``
    stringifies values json can't serialize natively (dates, IPs, ...).
    Mutates *history* in place and also returns it.
    """
    formatted = json.dumps(osint_obj, indent=2, default=str)
    injected_turn = {
        "role": "system",
        "content": f"### Injected OSINT Result\n```\n{formatted}\n```",
    }
    history.append(injected_turn)
    return history
122
-
123
- # ---------------------------------------------------------------------
124
- # OSINT Dashboard Callbacks
125
- # ---------------------------------------------------------------------
126
-
127
def ui_lookup_ip(ip, enrich, mitre):
    """Run the `lookup_ip` task and fan the normalized result out to the UI panes."""
    raw = call_task("lookup_ip", {"ip": ip, "enrich": enrich, "map_mitre": mitre})
    norm = normalize_result(raw)
    # Six outputs: summary box, markdown pane, three JSON panes, raw state.
    panes = ("summary", "markdown", "json", "mitre", "stix")
    return tuple(norm[key] for key in panes) + (raw,)
131
-
132
def ui_lookup_domain(domain, enrich, mitre):
    """Run the `lookup_domain` task and fan the normalized result out to the UI panes."""
    task_args = {"domain": domain, "enrich": enrich, "map_mitre": mitre}
    raw = call_task("lookup_domain", task_args)
    norm = normalize_result(raw)
    # Six outputs: summary box, markdown pane, three JSON panes, raw state.
    panes = ("summary", "markdown", "json", "mitre", "stix")
    return tuple(norm[key] for key in panes) + (raw,)
136
-
137
def ui_lookup_hash(h, ht, enrich, mitre):
    """Run the `lookup_hash` task and fan the normalized result out to the UI panes."""
    task_args = {"hash": h, "hash_type": ht, "enrich": enrich, "map_mitre": mitre}
    raw = call_task("lookup_hash", task_args)
    norm = normalize_result(raw)
    # Six outputs: summary box, markdown pane, three JSON panes, raw state.
    panes = ("summary", "markdown", "json", "mitre", "stix")
    return tuple(norm[key] for key in panes) + (raw,)
141
-
142
def ui_correlate_iocs(iocs):
    """Correlate a newline-separated blob of IOCs via the `correlate_iocs` task."""
    # Keep only non-empty lines, stripped of surrounding whitespace.
    cleaned = []
    for line in iocs.splitlines():
        line = line.strip()
        if line:
            cleaned.append(line)
    raw = call_task("correlate_iocs", {"iocs": cleaned})
    norm = normalize_result(raw)
    # Five outputs: summary box, markdown pane, two JSON panes, raw state.
    return norm["summary"], norm["markdown"], norm["json"], norm["mitre"], raw
147
-
148
def ui_quickscan(target):
    """Run the `quickscan` task against *target* and return its UI outputs."""
    norm = normalize_result(raw := call_task("quickscan", {"target": target}))
    # Four outputs: summary box, markdown pane, JSON pane, raw state.
    return norm["summary"], norm["markdown"], norm["json"], raw
152
-
153
- # ---------------------------------------------------------------------
154
- # MCP Bridge
155
- # ---------------------------------------------------------------------
156
-
157
def ui_bridge(tool, args_json):
    """Invoke an arbitrary registered MCP tool with a JSON argument blob.

    Args:
        tool: Registered task name selected in the dropdown.
        args_json: JSON-encoded dict of arguments typed by the user.

    Returns:
        Tuple of (json_text, markdown_text, raw_result). On a malformed
        payload the parse error is reported in the JSON pane, the
        markdown pane is cleared, and the raw state resets to ``{}``.
    """
    try:
        payload = json.loads(args_json)
    except (ValueError, TypeError) as e:
        # Fix: was a blanket `except Exception`, which masked unrelated
        # bugs as "parse errors". ValueError covers json.JSONDecodeError;
        # TypeError covers a None/non-string payload from the UI.
        return json.dumps({"error": str(e)}, indent=2), "", {}
    raw = call_task(tool, payload)
    norm = normalize_result(raw)
    return norm["json"], norm["markdown"], raw
165
-
166
- # ---------------------------------------------------------------------
167
- # BUILD UI
168
- # ---------------------------------------------------------------------
169
-
170
- def build_interface():
171
- with gr.Blocks(title="Parrot OSINT MCP Console") as demo:
172
- gr.Markdown("# Parrot OSINT MCP Console")
173
-
174
- osint_state = gr.State({})
175
-
176
- # -------------------------
177
- # OSINT Dashboard
178
- # -------------------------
179
- with gr.Tab("OSINT Dashboard"):
180
- with gr.Tab("IP"):
181
- ip = gr.Textbox(label="IP Address")
182
- enrich = gr.Checkbox(value=True, label="Enrich")
183
- mitre = gr.Checkbox(value=True, label="MITRE Map")
184
- run = gr.Button("Run IP Lookup")
185
-
186
- s = gr.Textbox(label="Summary")
187
- md = gr.Markdown()
188
- js = gr.Code(language="json")
189
- mt = gr.Code(language="json")
190
- st = gr.Code(language="json")
191
-
192
- run.click(ui_lookup_ip, [ip, enrich, mitre], [s, md, js, mt, st, osint_state])
193
-
194
- # Add other tabs (Domain, Hash, etc.)
195
- # Your earlier implementation plugs in cleanly.
196
-
197
- # -------------------------
198
- # MCP Bridge
199
- # -------------------------
200
- with gr.Tab("MCP Bridge"):
201
- tool = gr.Dropdown(sorted(TASK_REGISTRY.keys()))
202
- args = gr.Code(language="json")
203
- btn = gr.Button("Call Tool")
204
- out_js = gr.Code(language="json")
205
- out_md = gr.Markdown()
206
-
207
- btn.click(ui_bridge, [tool, args], [out_js, out_md, osint_state])
208
-
209
  # -------------------------
210
  # Analyst Copilot
211
  # -------------------------
@@ -231,13 +106,16 @@ def build_interface():
231
  value="berkeley-nest/WhiteRabbitNeo-8B",
232
  )
233
 
 
 
 
234
  chatbot = gr.ChatInterface(
235
  respond,
236
  type="messages",
237
  additional_inputs=[
238
  system_prompt,
239
  model_select,
240
- gr.OAuthToken(label="HF Token"),
241
  gr.Slider(0.1, 2.0, value=0.7, step=0.1, label="Temperature"),
242
  gr.Slider(0.1, 1.0, value=0.95, step=0.05, label="Top-p"),
243
  gr.Slider(32, 4096, value=512, step=32, label="Max Tokens"),
@@ -251,7 +129,7 @@ def build_interface():
251
  outputs=[chatbot._chatbot_state],
252
  )
253
 
254
- return demo
255
 
256
 
257
  if __name__ == "__main__":
 
81
  # ANALYST COPILOT (LLM)
82
  # ---------------------------------------------------------------------
83
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
84
  # -------------------------
85
  # Analyst Copilot
86
  # -------------------------
 
106
  value="berkeley-nest/WhiteRabbitNeo-8B",
107
  )
108
 
109
+ gr.Markdown("### Provide your HuggingFace API Token for the LLM:")
110
+ hf_token = gr.OAuthToken()
111
+
112
  chatbot = gr.ChatInterface(
113
  respond,
114
  type="messages",
115
  additional_inputs=[
116
  system_prompt,
117
  model_select,
118
+ hf_token,
119
  gr.Slider(0.1, 2.0, value=0.7, step=0.1, label="Temperature"),
120
  gr.Slider(0.1, 1.0, value=0.95, step=0.05, label="Top-p"),
121
  gr.Slider(32, 4096, value=512, step=32, label="Max Tokens"),
 
129
  outputs=[chatbot._chatbot_state],
130
  )
131
 
132
+ return demo
133
 
134
 
135
  if __name__ == "__main__":