Fix initial prompt injection
app.py
CHANGED
@@ -97,17 +97,20 @@ def chat_with_model(messages):
         yield messages + [{"role": "assistant", "content": "⚠️ No model loaded."}]
         return
 
+    current_id = patient_id.value
+    if current_id is None:
+        yield messages
+        return
+
     pad_id = current_tokenizer.pad_token_id
     eos_id = current_tokenizer.eos_token_id
     if pad_id is None:
         pad_id = current_tokenizer.unk_token_id or 0
 
-
-
-    max_new_tokens = 1024
-    generated_tokens = 0
+    # Remove the initial welcome if present
+    filtered_messages = [msg for msg in messages if not (msg["role"] == "assistant" and "Welcome to the Radiologist's Companion" in msg["content"])]
 
-    #
+    # Build system context
     system_messages = [
         {
             "role": "system",
@@ -132,26 +135,8 @@ def chat_with_model(messages):
         }
     ]
 
-
-
-    welcome_message = (
-        "**Welcome to the Radiologist's Companion!**\n\n"
-        "You can ask me about the patient's medical history or available imaging data.\n"
-        "- I can summarize key details from the EHR.\n"
-        "- I can tell you which medical images are available.\n"
-        "- If you'd like an organ segmentation (e.g. spleen, liver, kidney_left, colon, femur_right) on an abdominal CT scan, just ask!\n\n"
-        "**Example Requests:**\n"
-        "- \"What do we know about this patient?\"\n"
-        "- \"Which images are available for this patient?\"\n"
-        "- \"Can you segment the spleen from the CT scan?\"\n"
-    )
-
-    # If it's the first user message (i.e., no assistant yet), prepend welcome
-    if len(messages) == 1 and messages[0]['role'] == 'user':
-        messages = [{"role": "assistant", "content": welcome_message}] + messages
+    full_messages = system_messages + filtered_messages
 
-    # Merge full conversation
-    full_messages = system_messages + messages
 
     prompt = format_prompt(full_messages)
 
@@ -277,31 +262,19 @@ def add_user_message(user_input, history):
     return "", patient_conversations[current_id]
 
 
-
 def autofill_patient(patient_key):
     if patient_key in patient_db:
         info = patient_db[patient_key]
 
-        # Init conversation if not existing
+        # Init empty conversation if not existing
         if info["id"] not in patient_conversations:
-            welcome_message = (
-                "**Welcome to the Radiologist's Companion!**\n\n"
-                "You can ask me about the patient's medical history or available imaging data.\n"
-                "- I can summarize key details from the EHR.\n"
-                "- I can tell you which medical images are available.\n"
-                "- If you'd like an organ segmentation (e.g. spleen, liver, kidney_left, colon, femur_right) on an abdominal CT scan, just ask!\n\n"
-                "**Example Requests:**\n"
-                "- \"What do we know about this patient?\"\n"
-                "- \"Which images are available for this patient?\"\n"
-                "- \"Can you segment the spleen from the CT scan?\"\n"
-            )
-
-            patient_conversations[info["id"]] = [{"role": "assistant", "content": welcome_message}]
+            patient_conversations[info["id"]] = []
 
         return info["name"], info["age"], info["id"], info["notes"]
     return "", "", "", ""
 
 
+
 with gr.Blocks(css=".gradio-container {height: 100vh; overflow: hidden;}") as demo:
     gr.Markdown("<h2 style='text-align: center;'>Radiologist's Companion</h2>")
 
@@ -347,11 +320,28 @@ with gr.Blocks(css=".gradio-container {height: 100vh; overflow: hidden;}") as de
         # After patient selected, load their conversation into chatbot
         def load_patient_conversation(patient_key):
             if patient_key in patient_db:
-
-                history = patient_conversations.get(
-
+                patient_id_val = patient_db[patient_key]["id"]
+                history = patient_conversations.get(patient_id_val, [])
+
+                # Show welcome + history
+                welcome_message = {
+                    "role": "assistant",
+                    "content": (
+                        "**Welcome to the Radiologist's Companion!**\n\n"
+                        "You can ask me about the patient's medical history or available imaging data.\n"
+                        "- I can summarize key details from the EHR.\n"
+                        "- I can tell you which medical images are available.\n"
+                        "- If you'd like an organ segmentation (e.g. spleen, liver, kidney_left, colon, femur_right) on an abdominal CT scan, just ask!\n\n"
+                        "**Example Requests:**\n"
+                        "- \"What do we know about this patient?\"\n"
+                        "- \"Which images are available for this patient?\"\n"
+                        "- \"Can you segment the spleen from the CT scan?\"\n"
+                    )
+                }
+                return [welcome_message] + history
             return []
 
+
     patient_selector.change(
         autofill_patient,
         inputs=[patient_selector],
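For reference, a minimal standalone sketch of the welcome-message filter introduced in chat_with_model above. The sample conversation and the WELCOME_MARKER name are illustrative only; the list comprehension mirrors the one in the diff.

# Hypothetical sample conversation: the UI-only welcome turn plus one user turn.
WELCOME_MARKER = "Welcome to the Radiologist's Companion"
messages = [
    {"role": "assistant", "content": "**Welcome to the Radiologist's Companion!** ..."},
    {"role": "user", "content": "Can you segment the spleen from the CT scan?"},
]

# Drop the injected welcome so it never reaches the model prompt.
filtered_messages = [
    msg for msg in messages
    if not (msg["role"] == "assistant" and WELCOME_MARKER in msg["content"])
]

print(filtered_messages)  # only the user turn remains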