Request for help: error during the manager_agent.run step in multiagent_notebook

#96 opened by adurs2002

Notebook link: https://colab.research.google.com/drive/1DpnlhBor0zkfgNWxQmcxIVMCV0s3f_bZ?usp=sharing

Input

manager_agent.run("""
Find all Batman filming locations in the world, calculate the time to transfer via cargo plane to here (we're in Gotham, 40.7128° N, 74.0060° W).
Also give me some supercar factories with the same cargo plane transfer time. You need at least 6 points in total.
Represent this as spatial map of the world, with the locations represented as scatter points with a color that depends on the travel time, and save it to saved_map.png!

Here's an example of how to plot and return a map:
import plotly.express as px
df = px.data.carshare()
fig = px.scatter_map(df, lat="centroid_lat", lon="centroid_lon", text="name", color="peak_hour", size=100,
     color_continuous_scale=px.colors.sequential.Magma, size_max=15, zoom=1)
fig.show()
fig.write_image("saved_image.png")
final_answer(fig)

Never try to process strings using code: when you have a string to read, just print it and you'll see it.
""")

Output and Error

╭──────────────────────────────────────────────────── New run ────────────────────────────────────────────────────╮
│                                                                                                                 │
│ Find all Batman filming locations in the world, calculate the time to transfer via cargo plane to here (we're   │
│ in Gotham, 40.7128° N, 74.0060° W).                                                                             │
│ Also give me some supercar factories with the same cargo plane transfer time. You need at least 6 points in     │
│ total.                                                                                                          │
│ Represent this as spatial map of the world, with the locations represented as scatter points with a color that  │
│ depends on the travel time, and save it to saved_map.png!                                                       │
│                                                                                                                 │
│ Here's an example of how to plot and return a map:                                                              │
│ import plotly.express as px                                                                                     │
│ df = px.data.carshare()                                                                                         │
│ fig = px.scatter_map(df, lat="centroid_lat", lon="centroid_lon", text="name", color="peak_hour", size=100,      │
│      color_continuous_scale=px.colors.sequential.Magma, size_max=15, zoom=1)                                    │
│ fig.show()                                                                                                      │
│ fig.write_image("saved_image.png")                                                                              │
│ final_answer(fig)                                                                                               │
│                                                                                                                 │
│ Never try to process strings using code: when you have a string to read, just print it and you'll see it.       │
│                                                                                                                 │
╰─ InferenceClientModel - deepseek-ai/DeepSeek-R1 ────────────────────────────────────────────────────────────────╯
---------------------------------------------------------------------------
HTTPError                                 Traceback (most recent call last)
/usr/local/lib/python3.11/dist-packages/huggingface_hub/utils/_http.py in hf_raise_for_status(response, endpoint_name)
    408     try:
--> 409         response.raise_for_status()
    410     except HTTPError as e:

8 frames
/usr/local/lib/python3.11/dist-packages/requests/models.py in raise_for_status(self)
   1023         if http_error_msg:
-> 1024             raise HTTPError(http_error_msg, response=self)
   1025 

HTTPError: 504 Server Error: Gateway Time-out for url: https://router.huggingface.co/together/v1/chat/completions

The above exception was the direct cause of the following exception:

HfHubHTTPError                            Traceback (most recent call last)
<ipython-input-34-cb453d7649a1> in <cell line: 0>()
----> 1 manager_agent.run("""
      2 Find all Batman filming locations in the world, calculate the time to transfer via cargo plane to here (we're in Gotham, 40.7128° N, 74.0060° W).
      3 Also give me some supercar factories with the same cargo plane transfer time. You need at least 6 points in total.
      4 Represent this as spatial map of the world, with the locations represented as scatter points with a color that depends on the travel time, and save it to saved_map.png!
      5 

/usr/local/lib/python3.11/dist-packages/smolagents/agents.py in run(self, task, stream, reset, images, additional_args, max_steps)
    397         # Outputs are returned only at the end. We only look at the last step.
    398 
--> 399         steps = list(self._run_stream(task=self.task, max_steps=max_steps, images=images))
    400         assert isinstance(steps[-1], FinalAnswerStep)
    401         output = steps[-1].output

/usr/local/lib/python3.11/dist-packages/smolagents/agents.py in _run_stream(self, task, max_steps, images)
    450                 planning_start_time = time.time()
    451                 planning_step = None
--> 452                 for element in self._generate_planning_step(
    453                     task, is_first_step=(self.step_number == 1), step=self.step_number
    454                 ):

/usr/local/lib/python3.11/dist-packages/smolagents/agents.py in _generate_planning_step(self, task, is_first_step, step)
    565                         yield event
    566             else:
--> 567                 plan_message = self.model.generate(input_messages, stop_sequences=["<end_plan>"])
    568                 plan_message_content = plan_message.content
    569                 input_tokens, output_tokens = (

/usr/local/lib/python3.11/dist-packages/smolagents/models.py in generate(self, messages, stop_sequences, response_format, tools_to_call_from, **kwargs)
   1307             **kwargs,
   1308         )
-> 1309         response = self.client.chat_completion(**completion_kwargs)
   1310 
   1311         self._last_input_token_count = response.usage.prompt_tokens

/usr/local/lib/python3.11/dist-packages/huggingface_hub/inference/_client.py in chat_completion(self, messages, model, stream, frequency_penalty, logit_bias, logprobs, max_tokens, n, presence_penalty, response_format, seed, stop, stream_options, temperature, tool_choice, tool_prompt, tools, top_logprobs, top_p, extra_body)
    921             api_key=self.token,
    922         )
--> 923         data = self._inner_post(request_parameters, stream=stream)
    924 
    925         if stream:

/usr/local/lib/python3.11/dist-packages/huggingface_hub/inference/_client.py in _inner_post(self, request_parameters, stream)
    277 
    278         try:
--> 279             hf_raise_for_status(response)
    280             return response.iter_lines() if stream else response.content
    281         except HTTPError as error:

/usr/local/lib/python3.11/dist-packages/huggingface_hub/utils/_http.py in hf_raise_for_status(response, endpoint_name)
    480         # Convert `HTTPError` into a `HfHubHTTPError` to display request information
    481         # as well (request id and/or server error message)
--> 482         raise _format(HfHubHTTPError, str(e), response) from e
    483 
    484 

HfHubHTTPError: 504 Server Error: Gateway Time-out for url: https://router.huggingface.co/together/v1/chat/completions
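
From the traceback, the 504 is returned by the Hugging Face router while the manager agent is generating its planning step, so it looks like an upstream provider timeout rather than something in the notebook code itself. If it's transient, would a retry wrapper like this be a reasonable workaround? (Just a sketch: manager_agent is the agent from the notebook, task is the prompt string shown above, and the delay values are arbitrary.)

import time
from huggingface_hub.utils import HfHubHTTPError

result = None
for attempt in range(3):
    try:
        result = manager_agent.run(task)  # the same call that currently fails
        break
    except HfHubHTTPError as err:
        if "504" not in str(err) or attempt == 2:
            raise  # give up on non-timeout errors or after the last attempt
        time.sleep(30 * (attempt + 1))  # back off before retrying

Or is the better fix to switch to a smaller model or a different inference provider? DeepSeek-R1's long reasoning outputs seem like they could easily exceed the gateway's time limit.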
