Skip to content

Commit 1f932e8

Browse files
fix: simplify async_conversation_run example to reduce CI flakiness
The original example used code_interpreter with differential equations, which caused timeouts and flaky CI failures. Simplified to "2+2" math. Original complex example preserved as async_conversation_run_code_interpreter.py and added to CI skip list (too slow/flaky for CI).
1 parent d4325cb commit 1f932e8

File tree

3 files changed

+63
-34
lines changed

3 files changed

+63
-34
lines changed

examples/mistral/agents/async_conversation_run.py

Lines changed: 5 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -9,48 +9,19 @@
99
MODEL = "mistral-medium-2505"
1010

1111

12-
def math_question_generator(question_num: int):
13-
"""Random generator of mathematical question
14-
15-
Args:
16-
question_num (int): the number of the question that will be returned, should be between 1-100
17-
"""
18-
return (
19-
"solve the following differential equation: `y'' + 3y' + 2y = 0`"
20-
if question_num % 2 == 0
21-
else "solve the following differential equation: `y'' - 4y' + 4y = e^x`"
22-
)
23-
24-
2512
async def main():
2613
api_key = os.environ["MISTRAL_API_KEY"]
2714
client = Mistral(api_key=api_key)
2815

29-
class Explanation(BaseModel):
30-
explanation: str
31-
output: str
32-
33-
class MathDemonstration(BaseModel):
34-
steps: list[Explanation]
35-
final_answer: str
16+
class MathResult(BaseModel):
17+
answer: int
3618

37-
async with RunContext(model=MODEL, output_format=MathDemonstration) as run_ctx:
38-
# register a new function that can be executed on the client side
39-
run_ctx.register_func(math_question_generator)
19+
async with RunContext(model=MODEL, output_format=MathResult) as run_ctx:
4020
run_result = await client.beta.conversations.run_async(
4121
run_ctx=run_ctx,
42-
instructions="Use the code interpreter to help you when asked mathematical questions.",
43-
inputs=[
44-
{"role": "user", "content": "hey"},
45-
{"role": "assistant", "content": "hello"},
46-
{"role": "user", "content": "Request a math question and answer it."},
47-
],
48-
tools=[{"type": "code_interpreter"}],
22+
inputs=[{"role": "user", "content": "What is 2 + 2?"}],
4923
)
50-
print("All run entries:")
51-
for entry in run_result.output_entries:
52-
print(f"{entry}")
53-
print(f"Final model: {run_result.output_as_model}")
24+
print(f"Result: {run_result.output_as_model}")
5425

5526

5627
if __name__ == "__main__":
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
#!/usr/bin/env python
2+
import asyncio
3+
import os
4+
5+
from mistralai.client import Mistral
6+
from mistralai.extra.run.context import RunContext
7+
from mistralai.client.types import BaseModel
8+
9+
MODEL = "mistral-medium-2505"
10+
11+
12+
def math_question_generator(question_num: int) -> str:
    """Return one of two fixed differential-equation questions.

    Despite the "generator" name, selection is deterministic, not random:
    even question numbers get a homogeneous second-order ODE and odd
    numbers get a non-homogeneous one. This docstring may be surfaced to
    the model as the tool description when registered via
    ``run_ctx.register_func`` — TODO confirm against RunContext.

    Args:
        question_num (int): the number of the question that will be
            returned; expected (but not enforced) to be between 1-100.

    Returns:
        str: the question text for the model to solve.
    """
    return (
        "solve the following differential equation: `y'' + 3y' + 2y = 0`"
        if question_num % 2 == 0
        else "solve the following differential equation: `y'' - 4y' + 4y = e^x`"
    )
23+
24+
25+
async def main():
    """Drive one agent conversation: the model requests a math question via a
    client-side function, solves it with the code interpreter, and returns a
    structured demonstration."""
    mistral = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

    # Structured-output schema: field names are part of the wire contract,
    # so they must not be renamed.
    class Explanation(BaseModel):
        explanation: str
        output: str

    class MathDemonstration(BaseModel):
        steps: list[Explanation]
        final_answer: str

    async with RunContext(model=MODEL, output_format=MathDemonstration) as ctx:
        # Expose the client-side question generator as a callable tool.
        ctx.register_func(math_question_generator)
        result = await mistral.beta.conversations.run_async(
            run_ctx=ctx,
            instructions="Use the code interpreter to help you when asked mathematical questions.",
            inputs=[
                {"role": "user", "content": "hey"},
                {"role": "assistant", "content": "hello"},
                {"role": "user", "content": "Request a math question and answer it."},
            ],
            tools=[{"type": "code_interpreter"}],
        )
        print("All run entries:")
        for item in result.output_entries:
            print(f"{item}")
        print(f"Final model: {result.output_as_model}")
54+
55+
56+
if __name__ == "__main__":
    # Script entry point: run the async example to completion.
    asyncio.run(main())

scripts/run_examples.sh

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ exclude_files=(
3333
"examples/mistral/classifier/async_classifier.py"
3434
"examples/mistral/mcp_servers/sse_server.py"
3535
"examples/mistral/mcp_servers/stdio_server.py"
36+
"examples/mistral/agents/async_conversation_run_code_interpreter.py"
3637
"examples/mistral/agents/async_conversation_run_stream.py"
3738
"examples/mistral/agents/async_conversation_run_mcp.py"
3839
"examples/mistral/agents/async_conversation_run_mcp_remote.py"

0 commit comments

Comments
 (0)