Update app.py
app.py CHANGED
@@ -95,8 +95,7 @@ async def run_tutor_dashboard(user_message):
 
     return parse_agent_response(final_text)
 
-# --- Gradio Dashboard UI
-# We removed 'theme' and 'type="messages"' to ensure compatibility
+# --- Gradio Dashboard UI ---
 with gr.Blocks(title="AI Python Tutor (MCP Dashboard)") as demo:
     gr.Markdown("# ๐ Vibe Coding Academy")
     gr.Markdown("Powered by **Nebius** (Llama 3.1) & **MCP** (Local Filesystem Access)")

@@ -104,8 +103,8 @@ with gr.Blocks(title="AI Python Tutor (MCP Dashboard)") as demo:
     with gr.Row():
         # Left Column: Chat & Input
         with gr.Column(scale=2):
-            #
-            chatbot = gr.Chatbot(height=500, label="Tutor Chat")
+            # FIXED: Explicitly set type="messages" to match the data format we are sending
+            chatbot = gr.Chatbot(height=500, label="Tutor Chat", type="messages")
 
             msg = gr.Textbox(
                 label="1. What do you want to learn?",

@@ -120,23 +119,27 @@ with gr.Blocks(title="AI Python Tutor (MCP Dashboard)") as demo:
             article_box = gr.Markdown(value="### ๐ Articles & Courses\n*Waiting for topic...*", label="3. Articles & Courses")
             quiz_box = gr.Markdown(value="### ๐ง Quick Quiz\n*Waiting for topic...*", label="4. Knowledge Check")
 
-    # --- Interaction Logic (
+    # --- Interaction Logic (Gradio 5 Messages Format) ---
     async def respond(user_message, history):
-        # 1. Start with
-        # history is a list of lists: [['hi', 'hello'], ['user_msg', None]]
+        # 1. Start with empty history if None
         if history is None: history = []
-        history.append([user_message, None])
 
-        # 2.
+        # 2. Append User Message (Dictionary format)
+        history.append({"role": "user", "content": user_message})
+
+        # 3. Append Empty Assistant Message (Placeholder)
+        history.append({"role": "assistant", "content": ""})
+
+        # Yield state immediately to show user message
         yield history, "", "", "", ""
 
-        #
+        # 4. Run the agent
         chat_text, video_text, article_text, quiz_text = await run_tutor_dashboard(user_message)
 
-        #
-        history[-1][1] = chat_text
+        # 5. Update the last history item (the assistant's placeholder)
+        history[-1]["content"] = chat_text
 
-        #
+        # 6. Yield final state
         yield history, "", video_text, article_text, quiz_text
 
     # Wire up inputs/outputs using the 'respond' generator
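Note: the event wiring referenced by the last comment sits outside this hunk. A minimal sketch of what it could look like with the new messages-format generator, assuming the right-hand column defines a video_box Markdown component alongside article_box and quiz_box (the name video_box is an assumption, not shown in this diff):

# Hypothetical wiring sketch, not part of this commit.
# respond yields 5 values: chat history, "" (clears the textbox), and the three panels.
msg.submit(
    respond,
    inputs=[msg, chatbot],
    outputs=[chatbot, msg, video_box, article_box, quiz_box],
)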