Improve UI responsiveness and image display
- Fixed user message styling to appear in blue immediately when typed
- Optimized image display width to 900px with better centering
- Added captions to generated visualizations
- Eliminated UI lag by removing unnecessary reruns (see the sketch below)
- Fixed chat history display to avoid message duplication
🤖 Generated with [Claude Code](https://claude.ai/code)
Co-Authored-By: Claude <[email protected]>
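
The bullets above come down to a small amount of Streamlit control flow: append the user message, render it right away with the app's own renderer, set a processing flag, and let the same script run continue instead of calling st.rerun(). The sketch below is a minimal, self-contained illustration of that flow, not the real app.py; the helper names (get_from_user, show_custom_response) mirror the diff that follows, but their bodies, the styling, and the placeholder prompt text are simplified assumptions.

```python
import streamlit as st

def get_from_user(prompt: str) -> dict:
    # Stand-in for app.py's helper: wrap the raw prompt as a chat-history entry.
    return {"role": "user", "content": prompt}

def show_custom_response(response: dict):
    # Stand-in renderer: user messages get blue styling, everything else plain markdown.
    if response["role"] == "user":
        st.markdown(
            f"<div style='color:#1f6feb;'>{response['content']}</div>",
            unsafe_allow_html=True,
        )
    else:
        st.markdown(str(response["content"]))

if "responses" not in st.session_state:
    st.session_state.responses = []

# Chat history: mirror of the diff's dedup check — if a question is still marked
# as processing and the newest entry is the user's, drop it from this pass so it
# is only drawn by the prompt handler below.
responses_to_show = st.session_state.responses
if st.session_state.get("processing") and responses_to_show and responses_to_show[-1]["role"] == "user":
    responses_to_show = responses_to_show[:-1]
for response in responses_to_show:
    show_custom_response(response)

prompt = st.chat_input("Ask a question")
if prompt:
    user_response = get_from_user(prompt)
    st.session_state.responses.append(user_response)
    show_custom_response(user_response)  # draw immediately instead of calling st.rerun()
    st.session_state.processing = True

if st.session_state.get("processing"):
    # The real app generates the assistant answer here; this sketch just echoes
    # the question, appends it, and clears the flag.
    question = st.session_state.responses[-1]["content"]
    st.session_state.responses.append({"role": "assistant", "content": f"You asked: {question}"})
    st.session_state.processing = False
```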
app.py
CHANGED
@@ -498,8 +498,12 @@ def show_custom_response(response):
     try:
         if isinstance(content, str) and (content.endswith('.png') or content.endswith('.jpg')):
             if os.path.exists(content):
-                # Display image
-                st.
+                # Display image with better styling and reasonable width
+                st.markdown("""
+                <div style='margin: 1rem 0; display: flex; justify-content: center;'>
+                </div>
+                """, unsafe_allow_html=True)
+                st.image(content, width=900, caption="Generated Visualization")
                 return {"is_image": True}
         # Also handle case where content shows filename but we want to show image
         elif isinstance(content, str) and any(ext in content for ext in ['.png', '.jpg']):
@@ -509,7 +513,11 @@ def show_custom_response(response):
             if filename_match:
                 filename = filename_match.group(1)
                 if os.path.exists(filename):
-                    st.
+                    st.markdown("""
+                    <div style='margin: 1rem 0; display: flex; justify-content: center;'>
+                    </div>
+                    """, unsafe_allow_html=True)
+                    st.image(filename, width=900, caption="Generated Visualization")
                     return {"is_image": True}
     except:
         pass
@@ -518,8 +526,12 @@ def show_custom_response(response):
 
 
 # Chat history
-# Display chat history
-for response_id, response in enumerate(st.session_state.responses):
+# Display chat history (skip the last user message if currently processing to avoid duplication)
+responses_to_show = st.session_state.responses
+if st.session_state.get("processing") and len(responses_to_show) > 0 and responses_to_show[-1]["role"] == "user":
+    responses_to_show = responses_to_show[:-1]
+
+for response_id, response in enumerate(responses_to_show):
     status = show_custom_response(response)
 
     # Show feedback section for assistant responses
@@ -618,17 +630,17 @@ if prompt and not st.session_state.get("processing"):
     prompt = None
 
 if prompt:
-    # Add user input to chat history
+    # Add user input to chat history and display immediately
    user_response = get_from_user(prompt)
    st.session_state.responses.append(user_response)
 
+    # Display user message immediately with proper styling
+    show_custom_response(user_response)
+
     # Set processing state
    st.session_state.processing = True
    st.session_state.current_model = model_name
    st.session_state.current_question = prompt
-
-    # Rerun to show processing indicator
-    st.rerun()
 
 # Process the question if we're in processing state
 if st.session_state.get("processing"):
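
For reference, the new image path in show_custom_response ends in a plain st.image call with a fixed width and caption. A standalone sketch of that call, using a hypothetical file path, looks like this:

```python
import os
import streamlit as st

content = "outputs/plot.png"  # hypothetical path; in app.py this comes from the response content

if os.path.exists(content):
    # A fixed 900px width keeps large figures readable without stretching to the full
    # page width; the caption matches the one added in the diff.
    st.image(content, width=900, caption="Generated Visualization")
else:
    st.warning(f"Image not found: {content}")
```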