Refactor respond function to use non-streaming API call for improved debugging and response handling
app.py (CHANGED)
@@ -127,31 +127,25 @@ def respond(
     is_complete = False

     try:
-
+        # Non-streaming version for debugging
+        full_response = client.chat_completion(
             messages,
             max_tokens=max_tokens,
-            stream=True,
+            stream=False,
             temperature=temperature,
             top_p=top_p,
-        )
-
-
-
-
-
-
-
-
-        current_history = history.copy() if history else []
-        current_history.append((message, response))
-        yield current_history, conversation_id
-
-        if is_complete or response:
-            final_history = history.copy() if history else []
-            final_history.append((message, response))
-            yield final_history, conversation_id
+        )
+
+        response = full_response.choices[0].message.content
+        print(f"Debug - Full response from API: {response}")
+
+        # Return complete response immediately
+        final_history = history.copy() if history else []
+        final_history.append((message, response))
+        yield final_history, conversation_id

     except Exception as e:
+        print(f"Debug - Error during API call: {str(e)}")
         error_history = history.copy() if history else []
         error_history.append((message, f"An error occurred: {str(e)}"))
         yield error_history, conversation_id
@@ -199,6 +193,9 @@ def respond_and_clear(message, history, conversation_id):
         # Get first response from generator
         new_history, conv_id = next(response_generator)

+        # Debug the response
+        print("Debug - Final history:", new_history)
+
         return new_history, conv_id, ""  # Clear message input

     except Exception as e:
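
For orientation, here is a minimal sketch of the refactored respond generator assembled into one piece. The client.chat_completion(...) call, the debug prints, and the shape of what is yielded come from the diff above; the function signature, the InferenceClient construction, the model id, and the way the messages payload is built from the chat history are illustrative assumptions, not the app's actual code.

# Sketch only: signature, model id, and message assembly are assumptions.
from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")  # hypothetical model id


def respond(message, history, conversation_id,
            max_tokens=512, temperature=0.7, top_p=0.95):
    # Assumed payload construction: replay prior (user, assistant) turns.
    messages = []
    for user_turn, bot_turn in (history or []):
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": bot_turn})
    messages.append({"role": "user", "content": message})

    try:
        # Non-streaming call: the complete reply arrives in one response object.
        full_response = client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=False,
            temperature=temperature,
            top_p=top_p,
        )
        response = full_response.choices[0].message.content
        print(f"Debug - Full response from API: {response}")

        # Yield the finished history exactly once.
        final_history = history.copy() if history else []
        final_history.append((message, response))
        yield final_history, conversation_id
    except Exception as e:
        print(f"Debug - Error during API call: {str(e)}")
        error_history = history.copy() if history else []
        error_history.append((message, f"An error occurred: {str(e)}"))
        yield error_history, conversation_id

With stream=False the client blocks until the whole completion is available, so the raw reply can be printed and inspected in one piece, which is the debugging convenience the commit message refers to.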
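
The second hunk adjusts respond_and_clear, the wrapper that pulls the reply out of the generator. Because the refactored respond now yields exactly once, a single next() call already returns the complete history. A hedged sketch of how that wrapper fits together; the parts not shown in the diff (how respond is invoked and what the except branch returns) are assumptions:

def respond_and_clear(message, history, conversation_id):
    try:
        # Assumption: the wrapper obtains the generator by calling respond()
        # with the UI's current values; the real call likely passes the
        # sampling settings as well.
        response_generator = respond(message, history, conversation_id)

        # Get first response from generator; with the non-streaming refactor
        # this is also the only yield, so it already holds the full reply.
        new_history, conv_id = next(response_generator)

        # Debug the response
        print("Debug - Final history:", new_history)

        return new_history, conv_id, ""  # Clear message input
    except Exception as e:
        # Assumed fallback: surface the error in the history and keep the id.
        error_history = history.copy() if history else []
        error_history.append((message, f"An error occurred: {str(e)}"))
        return error_history, conversation_id, ""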