Dataset schema (column summary from the dataset viewer):
  username:  string, length 1 to 118
  score:     float64, range 0 to 100
  timestamp: string date, from 2025-04-24 16:18:04 to 2025-09-18 20:11:52
  code:      string, length 10 to 42.3k
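For working with this dump programmatically, a minimal sketch follows that loads rows with these four columns into pandas and pulls each user's best submission. The file name leaderboard.jsonl is a placeholder for however this table is exported; it is not part of the original data.

import pandas as pd

# Placeholder export: one JSON object per row with the four columns
# described above (username, score, timestamp, code).
df = pd.read_json("leaderboard.jsonl", lines=True)
df["timestamp"] = pd.to_datetime(df["timestamp"])

# Keep each user's highest-scoring submission, best scores first.
best = (
    df.sort_values("score", ascending=False)
      .drop_duplicates(subset="username")
)
print(best[["username", "score", "timestamp", "code"]].head(10))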
dorinaaliko
30
2025-05-11T16:01:41.526550+00:00
https://huggingface.co/spaces/dorinaaliko/Final_Assignment_Template/tree/main
userwithnoname0
15
2025-05-11T16:43:59.417687+00:00
https://huggingface.co/spaces/None/tree/main
NimishaSoman
0
2025-05-11T17:12:24.834697+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
sebastianfrench
50
2025-05-11T17:38:23.347846+00:00
https://huggingface.co/spaces/sebastianfrench/Final_Assignment/tree/main
CalmScout
0
2025-05-11T17:47:06.563030+00:00
https://huggingface.co/spaces/CalmScout/AI_agents_course_final_assignment/tree/main
CyberAssassin
90
2025-05-11T18:03:48.296235+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
AndresPG123
55
2025-05-11T18:08:27.383163+00:00
https://huggingface.co/spaces/None/tree/main
adrianam
45
2025-05-11T18:39:25.634933+00:00
https://huggingface.co/spaces/None/tree/main
RobertoA83
75
2025-05-11T19:06:32.985363+00:00
https://huggingface.co/spaces/RobertoA83/gaia-agents-course/tree/main
ankitbiswas008
0
2025-05-11T19:12:21.728795+00:00
https://huggingface.co/spaces/ankitbiswas008/Final_Assignment_Ankit_Biswas/tree/main
sseal
55
2025-05-11T19:17:11.212029+00:00
https://huggingface.co/spaces/sseal/Final_Assignment/tree/main
CaptainCodeGmbH
30
2025-05-11T19:29:11.160801+00:00
https://huggingface.co/spaces/CaptainCodeGmbH/ai-gaia-test/tree/main
kschuette
35
2025-05-11T19:44:44.010522+00:00
https://huggingface.co/spaces/kschuette/Final_Assignment_Template/tree/main
ifoukarakis
35
2025-05-11T19:53:36.207936+00:00
https://huggingface.co/spaces/None/tree/main
Sibga76
0
2025-05-11T20:15:19.662404+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
kolkhi
40
2025-05-11T20:36:46.575359+00:00
https://huggingface.co/spaces/kolkhi/agent_course_final/tree/main
vovarbv
55
2025-05-11T20:46:55.434331+00:00
https://huggingface.co/spaces/vovarbv/final_general/tree/main
RomanusOtiosus
90
2025-05-11T21:24:49.276700+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
m-bendik
45
2025-05-11T21:41:18.543470+00:00
https://huggingface.co/spaces/None/tree/main
tripincloud
75
2025-05-11T22:08:33.611475+00:00
https://huggingface.co/spaces/None/tree/main
seanrobbins
30
2025-05-11T22:17:27.064380+00:00
https://huggingface.co/spaces/seanrobbins/Final_Assignment_Template/tree/main
EddL
0
2025-05-12T00:49:00.183248+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
civerson916
70
2025-05-12T01:03:52.792731+00:00
https://huggingface.co/spaces/civerson916/Final_Assignment_Template/tree/main
OppaAI
55
2025-05-12T01:05:25.190026+00:00
https://huggingface.co/spaces/None/tree/main
sachin261
0
2025-05-12T01:19:12.044380+00:00
https://huggingface.co/spaces/sachin261/Final_Assignment_Template/tree/main
SiberianPM
60
2025-05-12T02:31:57.579936+00:00
https://huggingface.co/spaces/SiberianPM/agent_course_final_boss/tree/main
laurgranda001
30
2025-05-12T03:59:36.222934+00:00
https://huggingface.co/spaces/lauragranda001/agentes/tree/main
krsnewwave
25
2025-05-12T04:00:39.502288+00:00
https://huggingface.co/spaces/krsnewwave/Final_Assignment_Agents_Course/tree/main
lauragranda001
30
2025-05-12T04:02:25.813857+00:00
https://huggingface.co/spaces/lauragranda001/agentes/tree/main
kalamkhan
35
2025-05-12T04:09:07.157747+00:00
https://huggingface.co/spaces/kalamkhan/Final_Assignment_Template/tree/main
seasonwarez
15
2025-05-12T04:09:49.827964+00:00
https://huggingface.co/spaces/seasonwarez/Final_Assignment_Template/tree/main
mabelwang21
30
2025-05-12T04:16:54.216222+00:00
https://huggingface.co/spaces/mabelwang21/Agents_Final_Assignment/tree/main
Pragya0909
0
2025-05-12T05:31:42.982941+00:00
https://huggingface.co/spaces/Pragya0909/Final_Assignment_Template/tree/main
ardwen13777
0
2025-05-12T05:32:57.704448+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
moonmilo1108
45
2025-05-12T06:02:23.988678+00:00
https://huggingface.co/spaces/None/tree/main
tbartlett
45
2025-05-12T06:12:14.311453+00:00
https://huggingface.co/spaces/tbartlett/Final_Assignment_Template/tree/main
pietro29
55
2025-05-12T06:59:14.345075+00:00
https://huggingface.co/spaces/pietro29/Final_Assignment_Template/tree/main
z4hid
80
2025-05-12T07:50:31.833672+00:00
https://huggingface.co/spaces/z4hid/The-Agent/tree/main
vince
0
2025-05-12T08:39:10.226616+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
Carloooss-pm
45
2025-05-12T08:56:07.917963+00:00
https://huggingface.co/spaces/Carloooss-pm/Agents_Course_Final_Assignment/tree/main
oneal79
35
2025-05-12T09:32:12.140366+00:00
https://huggingface.co/spaces/None/tree/main
Usernameasd
55
2025-05-12T10:21:49.192532+00:00
https://huggingface.co/spaces/Usernameasd/Unit_4._Final_Project/tree/main
mock_user
0
2025-05-12T10:27:19.432484+00:00
https://huggingface.co/spaces/Toumaima/my_customisedAgent/tree/main
lukevo212
100
2025-05-12T10:38:09.524032+00:00
https://huggingface.co/spaces/ZeroTimo/RobotPai/tree/main
ivasija
85
2025-05-12T10:44:12.320463+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
fatihsen
95
2025-05-12T10:53:39.353981+00:00
https://huggingface.co/spaces/fisherman611/gaia-agent/tree/main
oumaima
0
2025-05-12T10:54:43.012432+00:00
https://huggingface.co/spaces/Toumaima/my_customisedAgent/tree/main
aacrypto
40
2025-05-12T11:09:20.009786+00:00
https://huggingface.co/spaces/aacrypto/Final_Assignment_Template/tree/main
pvoropaev
50
2025-05-12T11:16:23.293177+00:00
https://huggingface.co/spaces/pvoropaev/Final_Assignment_Template/tree/main
Arnajak
30
2025-05-12T11:51:24.192440+00:00
https://huggingface.co/spaces/None/tree/main
chiragkakad
40
2025-05-12T12:08:42.878869+00:00
https://huggingface.co/spaces/chiragkakad/Final_Assignment_Template/tree/main
Antoine101
30
2025-05-12T12:58:27.992432+00:00
https://huggingface.co/spaces/Antoine101/Final_Assignment_Template/tree/main
prozorov
100
2025-05-12T13:29:55.363263+00:00
https://huggingface.co/spaces/prozorov/AI_Course_Final_Assignment/tree/main
AlikelKyoka
95
2025-05-12T13:35:11.375660+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
whoshubham
55
2025-05-12T13:38:57.152538+00:00
https://huggingface.co/spaces/whoshubham/my_first_ai_agent/tree/main
Srishanthj
0
2025-05-12T13:57:17.324457+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
Sokol35
0
2025-05-12T14:42:32.698472+00:00
https://huggingface.co/spaces/Facelook/Final_Assignment_Template/tree/main
jesusvilela
0
2025-05-12T14:57:19.226343+00:00
local_dev_env
no2-tiger
30
2025-05-12T15:12:22.006454+00:00
https://huggingface.co/spaces/None/tree/main
jdorney1999
70
2025-05-12T15:15:33.143175+00:00
https://huggingface.co/spaces/jdorney1999/Final_Assignment/tree/main
ankitaddya1
95
2025-05-12T15:18:37.818499+00:00
https://huggingface.co/spaces/ankitaddya1/AI_agent_final_proj/tree/main
aaaaaaaaaaaaaaaa
0
2025-05-12T15:25:51.584634+00:00
aaaaaaaaaaaaaaaa
aniqu18
30
2025-05-12T15:30:00.022740+00:00
https://huggingface.co/spaces/None/tree/main
funashi
35
2025-05-12T15:52:19.030874+00:00
https://huggingface.co/spaces/funashi/Final_Assignment/tree/main
marin-iuga
90
2025-05-12T15:59:31.811629+00:00
https://huggingface.co/spaces/marin-iuga/Final_Assignment_Template_1/tree/main
Saoussen-CH
0
2025-05-12T16:59:13.275618+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
clirimfurriku
50
2025-05-12T17:06:08.141430+00:00
https://huggingface.co/spaces/clirimfurriku/gaia_multitool_agent/tree/main
Mangb
55
2025-05-12T17:08:30.561793+00:00
https://huggingface.co/spaces/Mangb/tree/main
romain-lembo
90
2025-05-12T17:12:03.269056+00:00
https://huggingface.co/spaces/romain-lembo/Unit4-GAIA/tree/main
Anmol0201
0
2025-05-12T17:35:53.108056+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
cyberconnectbe
70
2025-05-12T18:03:30.630670+00:00
https://huggingface.co/spaces/cyberconnectbe/Final_Assignment_Template/tree/main
soneknikolaiev
0
2025-05-12T18:08:00.173782+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
ncuxomun
25
2025-05-12T19:11:52.426611+00:00
https://huggingface.co/spaces/ncuxomun/Final_Assignment_Template/tree/main
rtamaki
30
2025-05-12T19:48:50.298337+00:00
https://huggingface.co/spaces/rtamaki/HF_Agents_Course_Final_Assignment/tree/main
gatugood
55
2025-05-12T20:58:16.428327+00:00
https://huggingface.co/spaces/innafomina/Final_Assignment_Template/tree/main
KopperShield
65
2025-05-12T21:15:00.633650+00:00
https://huggingface.co/spaces/KopperShield/Agent_Final_Assignment/tree/main
Frest2
15
2025-05-12T21:22:11.986968+00:00
https://huggingface.co/spaces/Frest2/Final_Assignment_Template/tree/main
kate0711
0
2025-05-12T22:26:15.099046+00:00
https://huggingface.co/spaces/kate0711/Final_Assignment_Template/tree/main
soaljack
30
2025-05-12T22:55:07.435293+00:00
https://huggingface.co/spaces/soaljack/Final_Assignment/tree/main
dezshredder
40
2025-05-12T23:01:21.586187+00:00
https://huggingface.co/spaces/dezshredder/HF_AgentsCourse_Final_Assignment/tree/main
xcauex
55
2025-05-12T23:41:26.819330+00:00
https://huggingface.co/spaces/xcauex/Final_Assignment_Template/tree/main
luping85
35
2025-05-13T00:34:22.430409+00:00
https://huggingface.co/spaces/luping85/Agent_Course_Final_Assignment/tree/main
nicksebald
30
2025-05-13T02:10:17.457637+00:00
https://huggingface.co/spaces/nicksebald/Final_Assignment/tree/main
wolgwang
90
2025-05-13T02:53:26.632408+00:00
https://huggingface.co/spaces/wolgwang/Final_Agent/tree/main
ehottl
90
2025-05-13T05:28:57.985290+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
florspag
30
2025-05-13T05:32:28.228739+00:00
https://huggingface.co/spaces/florspag/agent-course-final-assignment/tree/main
WK512
0
2025-05-13T06:32:15.118764+00:00
https://huggingface.co/spaces/agents-course/Final_Assignment_Template/tree/main
eugenewhy
0
2025-05-13T06:49:03.529099+00:00
https://huggingface.co/spaces/eugenewhy/Final_Assignment_Template/tree/main
DShah007
40
2025-05-13T07:59:34.045589+00:00
https://huggingface.co/spaces/DShah007/agent-final-course/tree/main
ASu1
85
2025-05-13T08:01:40.204060+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
kishan-patel-dev
90
2025-05-13T08:23:19.382315+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
KonuTech
90
2025-05-13T09:44:25.950037+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
Itz-Amethyst
95
2025-05-13T11:53:46.545309+00:00
https://huggingface.co/spaces/None/tree/main
ZyzzSonOfZeus
30
2025-05-13T13:33:44.577921+00:00
https://huggingface.co/spaces/ZyzzSonOfZeus/Final_Assignment_Template_ZyzzAI/tree/main
samsonDzealot
0
2025-05-13T13:37:23.920281+00:00
https://huggingface.co/spaces/samsonDzealot/Final_Assignment_Agent_Course/tree/main
arman1611
75
2025-05-13T13:43:56.821172+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
atruji
90
2025-05-13T14:03:44.450095+00:00
https://huggingface.co/spaces/baixianger/RobotPai/tree/main
Shreyashgupta5
35
2025-05-13T14:07:47.491434+00:00
{ "cells": [ { "cell_type": "code", "execution_count": 94, "metadata": {}, "outputs": [], "source": [ "# %pip install -q langfuse wikipedia openai google-search-results pandas openai-whisper ffmpeg-python openpyxl" ] }, { "cell_type": "code", "execution_count": 95, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Config loaded. Sensitive keys available for use.\n", "Notebook variables set:\n", " model_name: gpt-4.1-2025-04-14\n", " temperature: 0.2\n", " verbose: True\n", " use_langfuse: True\n", " username: Shreyashgupta5\n", " code_link: https://huggingface.co/spaces/Shreyashgupta5/ai_agents_course\n", " api_base_url: https://agents-course-unit4-scoring.hf.space\n" ] } ], "source": [ "# Step 1: Load Configuration and Dependencies\n", "\n", "import json\n", "\n", "# Load sensitive config from config.json\n", "with open('config.json', 'r') as f:\n", " config = json.load(f)\n", "\n", "# Set notebook variables (these should match what you set in your notebook)\n", "temperature = 0.2\n", "verbose = True\n", "use_langfuse = True\n", "# model_name = \"o4-mini-2025-04-16\"\n", "model_name = \"gpt-4.1-2025-04-14\"\n", "username = \"Shreyashgupta5\"\n", "code_link = \"https://huggingface.co/spaces/Shreyashgupta5/ai_agents_course\"\n", "api_base_url = \"https://agents-course-unit4-scoring.hf.space\"\n", "\n", "# Print to verify\n", "print(\"Config loaded. Sensitive keys available for use.\")\n", "print(\"Notebook variables set:\")\n", "print(f\" model_name: {model_name}\")\n", "print(f\" temperature: {temperature}\")\n", "print(f\" verbose: {verbose}\")\n", "print(f\" use_langfuse: {use_langfuse}\")\n", "print(f\" username: {username}\")\n", "print(f\" code_link: {code_link}\")\n", "print(f\" api_base_url: {api_base_url}\")" ] }, { "cell_type": "code", "execution_count": 96, "metadata": {}, "outputs": [], "source": [ "# Step 2: Configure Langfuse Decorator-Based Client\n", "\n", "from langfuse.decorators import langfuse_context\n", "\n", "langfuse_context.configure(\n", " secret_key=config[\"langfuse_secret\"],\n", " public_key=config[\"langfuse_public_key\"],\n", " host=config[\"host\"]\n", ")" ] }, { "cell_type": "code", "execution_count": 97, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Task ID: 8e867cd7-cff9-4e6c-867a-ff5ddc2550be\n", "Question: How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? You can use the latest 2022 version of english wikipedia.\n", "----------------------------------------\n", "Task ID: a1e91b78-d3d8-4675-bb8d-62741b4b68a6\n", "Question: In the video https://www.youtube.com/watch?v=L1vXCYZAYYM, what is the highest number of bird species to be on camera simultaneously?\n", "----------------------------------------\n", "Task ID: 2d83110e-a098-4ebb-9987-066c06fa42d0\n", "Question: .rewsna eht sa \"tfel\" drow eht fo etisoppo eht etirw ,ecnetnes siht dnatsrednu uoy fI\n", "----------------------------------------\n", "Task ID: cca530fc-4052-43b2-b130-b30968d8aa44\n", "Question: Review the chess position provided in the image. It is black's turn. Provide the correct next move for black which guarantees a win. 
Please provide your response in algebraic notation.\n", "File: files-for-agent/cca530fc-4052-43b2-b130-b30968d8aa44.png\n", "----------------------------------------\n", "Task ID: 4fc2f1ae-8625-45b5-ab34-ad4433bc21f8\n", "Question: Who nominated the only Featured Article on English Wikipedia about a dinosaur that was promoted in November 2016?\n", "----------------------------------------\n", "Task ID: 6f37996b-2ac7-44b0-8e68-6d28256631b4\n", "Question: Given this table defining * on the set S = {a, b, c, d, e}\n", "\n", "|*|a|b|c|d|e|\n", "|---|---|---|---|---|---|\n", "|a|a|b|c|b|d|\n", "|b|b|c|a|e|c|\n", "|c|c|a|b|b|a|\n", "|d|b|e|b|e|d|\n", "|e|d|b|a|d|c|\n", "\n", "provide the subset of S involved in any possible counter-examples that prove * is not commutative. Provide your answer as a comma separated list of the elements in the set in alphabetical order.\n", "----------------------------------------\n", "Task ID: 9d191bce-651d-4746-be2d-7ef8ecadb9c2\n", "Question: Examine the video at https://www.youtube.com/watch?v=1htKBjuUWec.\n", "\n", "What does Teal'c say in response to the question \"Isn't that hot?\"\n", "----------------------------------------\n", "Task ID: cabe07ed-9eca-40ea-8ead-410ef5e83f91\n", "Question: What is the surname of the equine veterinarian mentioned in 1.E Exercises from the chemistry materials licensed by Marisa Alviar-Agnew & Henry Agnew under the CK-12 license in LibreText's Introductory Chemistry materials as compiled 08/21/2023?\n", "----------------------------------------\n", "Task ID: 3cef3a44-215e-4aed-8e3b-b1e3f08063b7\n", "Question: I'm making a grocery list for my mom, but she's a professor of botany and she's a real stickler when it comes to categorizing things. I need to add different foods to different categories on the grocery list, but if I make a mistake, she won't buy anything inserted in the wrong category. Here's the list I have so far:\n", "\n", "milk, eggs, flour, whole bean coffee, Oreos, sweet potatoes, fresh basil, plums, green beans, rice, corn, bell pepper, whole allspice, acorns, broccoli, celery, zucchini, lettuce, peanuts\n", "\n", "I need to make headings for the fruits and vegetables. Could you please create a list of just the vegetables from my list? If you could do that, then I can figure out how to categorize the rest of the list into the appropriate categories. But remember that my mom is a real stickler, so make sure that no botanical fruits end up on the vegetable list, or she won't get them when she's at the store. Please alphabetize the list of vegetables, and place each item in a comma separated list.\n", "----------------------------------------\n", "Task ID: 99c9cc74-fdc8-46c6-8f8d-3ce2d3bfeea3\n", "Question: Hi, I'm making a pie but I could use some help with my shopping list. I have everything I need for the crust, but I'm not sure about the filling. I got the recipe from my friend Aditi, but she left it as a voice memo and the speaker on my phone is buzzing so I can't quite make out what she's saying. Could you please listen to the recipe and list all of the ingredients that my friend described? I only want the ingredients for the filling, as I have everything I need to make my favorite pie crust. I've attached the recipe as Strawberry pie.mp3.\n", "\n", "In your response, please only list the ingredients, not any measurements. 
So if the recipe calls for \"a pinch of salt\" or \"two cups of ripe strawberries\" the ingredients on the list would be \"salt\" and \"ripe strawberries\".\n", "\n", "Please format your response as a comma separated list of ingredients. Also, please alphabetize the ingredients.\n", "File: files-for-agent/99c9cc74-fdc8-46c6-8f8d-3ce2d3bfeea3.mp3\n", "----------------------------------------\n", "Task ID: 305ac316-eef6-4446-960a-92d80d542f82\n", "Question: Who did the actor who played Ray in the Polish-language version of Everybody Loves Raymond play in Magda M.? Give only the first name.\n", "----------------------------------------\n", "Task ID: f918266a-b3e0-4914-865d-4faa564f1aef\n", "Question: What is the final numeric output from the attached Python code?\n", "File: files-for-agent/f918266a-b3e0-4914-865d-4faa564f1aef.py\n", "----------------------------------------\n", "Task ID: 3f57289b-8c60-48be-bd80-01f8099ca449\n", "Question: How many at bats did the Yankee with the most walks in the 1977 regular season have that same season?\n", "----------------------------------------\n", "Task ID: 1f975693-876d-457b-a649-393859e79bf3\n", "Question: Hi, I was out sick from my classes on Friday, so I'm trying to figure out what I need to study for my Calculus mid-term next week. My friend from class sent me an audio recording of Professor Willowbrook giving out the recommended reading for the test, but my headphones are broken :(\n", "\n", "Could you please listen to the recording for me and tell me the page numbers I'm supposed to go over? I've attached a file called Homework.mp3 that has the recording. Please provide just the page numbers as a comma-delimited list. And please provide the list in ascending order.\n", "File: files-for-agent/1f975693-876d-457b-a649-393859e79bf3.mp3\n", "----------------------------------------\n", "Task ID: 840bfca7-4f7b-481a-8794-c560c340185d\n", "Question: On June 6, 2023, an article by Carolyn Collins Petersen was published in Universe Today. This article mentions a team that produced a paper about their observations, linked at the bottom of the article. Find this paper. Under what NASA award number was the work performed by R. G. Arendt supported by?\n", "----------------------------------------\n", "Task ID: bda648d7-d618-4883-88f4-3466eabd860e\n", "Question: Where were the Vietnamese specimens described by Kuznetzov in Nedoshivina's 2010 paper eventually deposited? Just give me the city name without abbreviations.\n", "----------------------------------------\n", "Task ID: cf106601-ab4f-4af9-b045-5295fe67b37d\n", "Question: What country had the least number of athletes at the 1928 Summer Olympics? If there's a tie for a number of athletes, return the first in alphabetical order. Give the IOC country code as your answer.\n", "----------------------------------------\n", "Task ID: a0c07678-e491-4bbc-8f0b-07405144218f\n", "Question: Who are the pitchers with the number before and after Taishō Tamai's number as of July 2023? Give them to me in the form Pitcher Before, Pitcher After, use their last names only, in Roman characters.\n", "----------------------------------------\n", "Task ID: 7bd855d8-463d-4ed5-93ca-5fe35145f733\n", "Question: The attached Excel file contains the sales of menu items for a local fast-food chain. What were the total sales that the chain made from food (not including drinks)? 
Express your answer in USD with two decimal places.\n", "File: files-for-agent/7bd855d8-463d-4ed5-93ca-5fe35145f733.xlsx\n", "----------------------------------------\n", "Task ID: 5a0c1adf-205e-4841-a666-7c3ef95def9d\n", "Question: What is the first name of the only Malko Competition recipient from the 20th Century (after 1977) whose nationality on record is a country that no longer exists?\n", "----------------------------------------\n" ] } ], "source": [ "# Step 3: Load Questions and Download Required Files\n", "\n", "import json\n", "import os\n", "import requests\n", "\n", "# Set how many questions you want to process\n", "NUM_QUESTIONS_TO_RUN = 20 # <--- Change this number as needed\n", "\n", "# Load all questions from all-json/all_questions.json\n", "with open('all-json/all_questions.json', 'r') as f:\n", " all_questions = json.load(f)\n", "\n", "# Only process up to NUM_QUESTIONS_TO_RUN questions\n", "questions = all_questions[:NUM_QUESTIONS_TO_RUN]\n", "\n", "# Helper: Download file if it's not already present (supports images, Excel, CSV, etc.)\n", "def download_file_if_needed(q, api_base_url):\n", " file_name = q.get(\"file_name\")\n", " if file_name:\n", " file_url = f\"{api_base_url}/files/{q['task_id']}\"\n", " file_path = os.path.join(\"files-for-agent\", file_name)\n", " if not os.path.exists(file_path):\n", " r = requests.get(file_url)\n", " if r.status_code == 200:\n", " with open(file_path, \"wb\") as f:\n", " f.write(r.content)\n", " print(f\"Downloaded file for task {q['task_id']}: {file_path}\")\n", " else:\n", " print(f\"Failed to download file for task {q['task_id']}: {r.status_code}\")\n", "\n", "# Download files for relevant questions (images, Excel, CSV, etc.)\n", "for q in questions:\n", " download_file_if_needed(q, api_base_url)\n", "\n", "# Print out each question's task_id and question text for verification\n", "for q in questions:\n", " print(f\"Task ID: {q['task_id']}\")\n", " print(f\"Question: {q['question']}\")\n", " if q.get(\"file_name\"):\n", " print(f\"File: files-for-agent/{q['file_name']}\")\n", " print(\"-\" * 40)" ] }, { "cell_type": "code", "execution_count": 98, "metadata": {}, "outputs": [], "source": [ "# Step 4: Define Tools\n", "\n", "import os\n", "import requests\n", "import wikipedia\n", "import pandas as pd\n", "from langfuse.decorators import observe\n", "import base64\n", "import mimetypes\n", "\n", "# Wikipedia Search Tool\n", "@observe()\n", "def wikipedia_search(query, sentences=2):\n", " try:\n", " summary = wikipedia.summary(query, sentences=sentences, auto_suggest=True, redirect=True)\n", " return summary\n", " except wikipedia.DisambiguationError as e:\n", " return f\"Disambiguation error. 
Options: {e.options[:5]}\"\n", " except wikipedia.PageError:\n", " return \"No Wikipedia page found for the query.\"\n", " except Exception as e:\n", " return f\"Error: {str(e)}\"\n", "\n", "# SerpAPI Web Search Tool\n", "@observe()\n", "def serpapi_search(query):\n", " api_key = config.get(\"SERPAPI_API_KEY\") or os.environ.get(\"SERPAPI_API_KEY\")\n", " if not api_key:\n", " return \"No SerpAPI key provided.\"\n", " params = {\n", " \"q\": query,\n", " \"api_key\": api_key,\n", " \"engine\": \"google\",\n", " \"num\": 3\n", " }\n", " response = requests.get(\"https://serpapi.com/search\", params=params)\n", " if response.status_code == 200:\n", " data = response.json()\n", " if \"answer_box\" in data and \"answer\" in data[\"answer_box\"]:\n", " return data[\"answer_box\"][\"answer\"]\n", " elif \"organic_results\" in data and len(data[\"organic_results\"]) > 0:\n", " return data[\"organic_results\"][0].get(\"snippet\", \"No snippet found.\")\n", " else:\n", " return \"No relevant results found.\"\n", " else:\n", " return f\"SerpAPI error: {response.status_code} {response.text}\"\n", "\n", "@observe()\n", "def parse_excel_csv(file_name):\n", " \"\"\"\n", " Reads an Excel or CSV file and extracts schema, sample data, and summary statistics according to what the question asks for.\n", " \"\"\"\n", " import os\n", " file_path = os.path.join(\"files-for-agent\", file_name)\n", " try:\n", " if file_path.lower().endswith('.csv'):\n", " df = pd.read_csv(file_path)\n", " else:\n", " df = pd.read_excel(file_path)\n", " schema = df.columns.tolist()\n", " dtypes = df.dtypes.astype(str).to_dict()\n", " sample = df.head(3).to_dict(orient=\"records\")\n", " stats = df.describe(include='all').to_dict()\n", " return {\n", " \"schema\": schema,\n", " \"dtypes\": dtypes,\n", " \"sample\": sample,\n", " \"stats\": stats\n", " }\n", " except Exception as e:\n", " return f\"Error parsing file {file_path}: {str(e)}\"\n", "\n", "@observe()\n", "def analyze_image_with_vision(file_name, question, model_name, temperature):\n", " \"\"\"\n", " Uses a vision-capable model to analyze an image and answer a question about it.\n", " \"\"\"\n", " import openai\n", " import os\n", " file_path = os.path.join(\"files-for-agent\", file_name)\n", " api_key = config.get(\"openai_api_key\")\n", " client = openai.OpenAI(api_key=api_key)\n", " # Detect MIME type\n", " mime_type, _ = mimetypes.guess_type(file_path)\n", " if mime_type is None:\n", " mime_type = \"image/png\" # fallback\n", " with open(file_path, \"rb\") as img_file:\n", " image_bytes = img_file.read()\n", " base64_image = base64.b64encode(image_bytes).decode(\"utf-8\")\n", " data_url = f\"data:{mime_type};base64,{base64_image}\"\n", " response = client.chat.completions.create(\n", " model=model_name,\n", " messages=[\n", " {\"role\": \"user\", \"content\": [\n", " {\"type\": \"text\", \"text\": question},\n", " {\"type\": \"image_url\", \"image_url\": {\"url\": data_url}}\n", " ]}\n", " ],\n", " temperature=temperature\n", " )\n", " return response.choices[0].message.content\n", "\n", "# Whisper Audio Transcription Tool\n", "@observe()\n", "def transcribe_audio_with_whisper(file_name, model_size=\"base\"):\n", " \"\"\"\n", " Transcribes an audio file (e.g., .mp3) using OpenAI Whisper.\n", " Returns the transcribed text.\n", " \"\"\"\n", " import whisper\n", " file_path = os.path.join(\"files-for-agent\", file_name)\n", " try:\n", " model = whisper.load_model(model_size)\n", " result = model.transcribe(file_path)\n", " return result[\"text\"]\n", " except Exception 
as e:\n", " return f\"Error transcribing audio {file_path}: {str(e)}\"" ] }, { "cell_type": "code", "execution_count": 99, "metadata": {}, "outputs": [], "source": [ "# Step 5: Agent Planning Step (OpenAI v1.x+)\n", "\n", "import openai\n", "from langfuse.decorators import observe # (if not already imported)\n", "\n", "# Set your OpenAI API key from config\n", "client = openai.OpenAI(api_key=config[\"openai_api_key\"])\n", "\n", "@observe(as_type=\"generation\")\n", "def get_agent_plan(question, model_name, temperature, file_name=None):\n", " \"\"\"\n", " Sends the question to the model and asks for a plan and tool list.\n", " The prompt now describes the Excel/CSV tool.\n", " \"\"\"\n", " prompt = (\n", " \"You are an AI agent. Here is a question you need to answer:\\n\"\n", " f\"Question: {question}\\n\\n\"\n", " \"You have access to the following tools:\\n\"\n", " \"- Wikipedia Search: For factual and encyclopedic information.\\n\"\n", " \"- SerpAPI Web Search: For general web search (Google, Bing, etc.).\\n\"\n", " \"- Excel/CSV File Parser: For reading Excel or CSV files attached to the question.\\n\"\n", " \"- Image Analysis (Vision): For analyzing images attached to the question. This tool sends the image and the question to a vision-capable model and returns the model's answer.\\n\\n\"\n", " \"- Audio Transcription(Whisper): For transcribing audio files (eg: .mp3) to text that are attached to the question. This tool sends the audio file to a transcription model and returns the transcribed text from OpenAI Whisper.\\n\\n\"\n", " \"If a file is attached and it is an image (e.g., .png, .jpg, .jpeg), use the Image Analysis tool to analyze the image and answer the question.\\n\"\n", " \"Create a step-by-step plan to answer this question. For each step, specify which tool you would use and why. 
\"\n", " \"Be explicit about your reasoning for tool selection.\"\n", " )\n", " if file_name:\n", " prompt += f\"\\n\\nA file is attached: {file_name}\"\n", " response = client.chat.completions.create(\n", " model=model_name,\n", " messages=[{\"role\": \"user\", \"content\": prompt}],\n", " temperature=temperature\n", " )\n", " plan = response.choices[0].message.content\n", " return plan" ] }, { "cell_type": "code", "execution_count": 100, "metadata": {}, "outputs": [], "source": [ "# Step 6: Tool Execution Step\n", "\n", "from langfuse.decorators import observe\n", "import re\n", "\n", "def execute_tools(plan, question, file_name=None, temperature=temperature):\n", " tool_outputs = {}\n", " plan_lower = plan.lower()\n", "\n", " def tool_in_plan(tool_name):\n", " return tool_name in plan_lower\n", " \n", " # Audio file transcription\n", " if file_name and file_name.lower().endswith(('.mp3', '.wav', '.m4a', '.flac', '.ogg')):\n", " # Always transcribe audio if present\n", " audio_result = transcribe_audio_with_whisper(file_name)\n", " tool_outputs['audio_transcription'] = audio_result\n", "\n", " # Image file analysis\n", " if file_name and file_name.lower().endswith(('.png', '.jpg', '.jpeg')):\n", " if tool_in_plan(\"image analysis\") or tool_in_plan(\"vision\") or tool_in_plan(\"analyze image\"):\n", " image_result = analyze_image_with_vision(file_name, question, model_name, temperature)\n", " tool_outputs['image_analysis'] = image_result\n", " else:\n", " # Optionally, always analyze if image is present\n", " image_result = analyze_image_with_vision(file_name, question, model_name, temperature)\n", " tool_outputs['image_analysis'] = image_result\n", "\n", " # Excel/CSV file parsing (existing)\n", " if file_name and file_name.lower().endswith(('.xlsx', '.xls', '.csv')):\n", " excel_result = parse_excel_csv(file_name)\n", " tool_outputs['excel_csv'] = excel_result\n", "\n", " # Wikipedia\n", " if tool_in_plan(\"wikipedia\"):\n", " wiki_result = wikipedia_search(question)\n", " tool_outputs['wikipedia'] = wiki_result\n", " else:\n", " wiki_result = None\n", "\n", " # SerpAPI Web Search\n", " if tool_in_plan(\"serpapi web\") or tool_in_plan(\"web search\") or tool_in_plan(\"serpapi search\") or tool_in_plan(\"google search\"):\n", " serp_result = serpapi_search(question)\n", " tool_outputs['serpapi'] = serp_result\n", "\n", " # Fallback: If Wikipedia was run and is insufficient, and SerpAPI web search wasn't already run, run it\n", " fallback_needed = (\n", " wiki_result is not None and (\n", " \"no wikipedia page found\" in wiki_result.lower() or\n", " \"disambiguation error\" in wiki_result.lower() or\n", " \"error:\" in wiki_result.lower() or\n", " len(wiki_result) < 50\n", " ) and 'serpapi' not in tool_outputs\n", " )\n", " if fallback_needed:\n", " serp_result = serpapi_search(question)\n", " tool_outputs['serpapi'] = serp_result\n", " print(\"Fallback: Used SerpAPI web search due to insufficient Wikipedia result.\")\n", "\n", " print(\"Tools used for this question:\", list(tool_outputs.keys()))\n", " return tool_outputs" ] }, { "cell_type": "code", "execution_count": 101, "metadata": {}, "outputs": [], "source": [ "# Step 7: Synthesis Step (OpenAI v1.x+)\n", "\n", "from langfuse.decorators import observe # (if not already imported)\n", "\n", "@observe(as_type=\"generation\")\n", "def synthesize_final_answer(task_id, question, tool_outputs, gaia_doc, model_name, temperature):\n", " \"\"\"\n", " Uses the model to synthesize a final answer in GAIA format.\n", " The prompt is enhanced to 
force careful question analysis, step-by-step reasoning, and self-critique.\n", " \"\"\"\n", " prompt = (\n", " f\"You are an AI agent participating in the GAIA benchmark.\\n\"\n", " f\"Here is the official GAIA documentation for answer formatting:\\n\\n\"\n", " f\"{gaia_doc}\\n\\n\"\n", " f\"Here is the original question:\\n{question}\\n\\n\"\n", " f\"Here are the outputs from the tools you used:\\n{tool_outputs}\\n\\n\"\n", " \"Your task is to generate the most accurate and complete answer possible, strictly following the question's requirements and the GAIA format.\\n\\n\"\n", " \"Follow these steps:\\n\"\n", " \"1. Carefully re-read and break down the question. List all requirements, constraints, and any special instructions (e.g., only list ingredients for the filling, exclude measurements, alphabetize, etc.).\\n\"\n", " \"2. List all food items or ingredients mentioned in the tool outputs.\\n\"\n", " \"3. For each candidate ingredient, explain why it should be considered a core filling ingredient (i.e., combined with the others to form the main filling mixture). Exclude any ingredient that is only added after the main mixture is cooked, or is a topping, garnish, or finishing touch. If unsure, err on the side of excluding ambiguous items.\\n\"\n", " \"4. For each page number mentioned in the tool outputs, explain whether it is a required review (explicitly told to go over), an optional suggestion, or just mentioned in passing. Only include required review pages in your final answer, unless the question asks for all possible relevant pages.\"\n", " \"5. Step-by-step, reason through how to answer the question, making sure to address every requirement and constraint you listed in step 1.\\n\"\n", " \"6. Critique your draft answer: Does it fully and precisely answer the question? Does it include anything extra or miss anything required? If so, revise it.\\n\"\n", " \"7. Only after this, output the final answer in the required GAIA JSON format. 
Only output the JSON object, nothing else.\\n\"\n", " \"Show your work for steps 1-5 as comments (using lines starting with #), then output the JSON answer.\"\n", " )\n", " response = client.chat.completions.create(\n", " model=model_name,\n", " messages=[{\"role\": \"user\", \"content\": prompt}],\n", " temperature=temperature\n", " )\n", " final_answer_json = response.choices[0].message.content\n", " return final_answer_json" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Processing Task ID: 8e867cd7-cff9-4e6c-867a-ff5ddc2550be\n", "Tools used for this question: ['wikipedia']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: a1e91b78-d3d8-4675-bb8d-62741b4b68a6\n", "Tools used for this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 2d83110e-a098-4ebb-9987-066c06fa42d0\n", "Tools used for this question: []\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: cca530fc-4052-43b2-b130-b30968d8aa44\n", "Tools used for this question: ['image_analysis']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 4fc2f1ae-8625-45b5-ab34-ad4433bc21f8\n", "Tools used for this question: ['wikipedia']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 6f37996b-2ac7-44b0-8e68-6d28256631b4\n", "Tools used for this question: []\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 9d191bce-651d-4746-be2d-7ef8ecadb9c2\n", "Tools used for this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: cabe07ed-9eca-40ea-8ead-410ef5e83f91\n", "Tools used for this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 3cef3a44-215e-4aed-8e3b-b1e3f08063b7\n", "Fallback: Used SerpAPI web search due to insufficient Wikipedia result.\n", "Tools used for this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 99c9cc74-fdc8-46c6-8f8d-3ce2d3bfeea3\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/opt/anaconda3/envs/ai_agents_course/lib/python3.12/site-packages/whisper/transcribe.py:126: UserWarning: FP16 is not supported on CPU; using FP32 instead\n", " warnings.warn(\"FP16 is not supported on CPU; using FP32 instead\")\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Tools used for this question: ['audio_transcription']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 305ac316-eef6-4446-960a-92d80d542f82\n", "Tools used for 
this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: f918266a-b3e0-4914-865d-4faa564f1aef\n", "Tools used for this question: ['wikipedia']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 3f57289b-8c60-48be-bd80-01f8099ca449\n", "Tools used for this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 1f975693-876d-457b-a649-393859e79bf3\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/opt/anaconda3/envs/ai_agents_course/lib/python3.12/site-packages/whisper/transcribe.py:126: UserWarning: FP16 is not supported on CPU; using FP32 instead\n", " warnings.warn(\"FP16 is not supported on CPU; using FP32 instead\")\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Tools used for this question: ['audio_transcription']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 840bfca7-4f7b-481a-8794-c560c340185d\n", "Tools used for this question: ['serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: bda648d7-d618-4883-88f4-3466eabd860e\n", "Tools used for this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: cf106601-ab4f-4af9-b045-5295fe67b37d\n", "Tools used for this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: a0c07678-e491-4bbc-8f0b-07405144218f\n", "Tools used for this question: ['wikipedia', 'serpapi']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 7bd855d8-463d-4ed5-93ca-5fe35145f733\n", "Tools used for this question: ['excel_csv']\n", "Langfuse Trace URL: https://us.cloud.langfuse.com/project/cmabtwja701n8ad06grpw13lr/traces/1e8e8f8a-eb09-4326-a08a-7df954637367\n", "Processing Task ID: 5a0c1adf-205e-4841-a666-7c3ef95def9d\n", "Tools used for this question: ['wikipedia', 'serpapi']\n" ] } ], "source": [ "# Step 8: Main Agent Loop with Langfuse Traceability\n", "\n", "from langfuse.decorators import observe, langfuse_context\n", "\n", "@observe()\n", "def process_all_questions(questions, model_name, temperature, gaia_doc):\n", " final_answers = []\n", " for q in questions:\n", " print(f\"Processing Task ID: {q['task_id']}\")\n", " plan = get_agent_plan(q['question'], model_name, temperature, file_name=q.get(\"file_name\"))\n", " tool_outputs = execute_tools(plan, q['question'], file_name=q.get(\"file_name\"), temperature=temperature)\n", " # Use the correct task_id from the question\n", " final_answer_json = synthesize_final_answer(\n", " task_id=q['task_id'],\n", " question=q['question'],\n", " tool_outputs=tool_outputs,\n", " gaia_doc=gaia_doc,\n", " model_name=model_name,\n", " temperature=temperature\n", " )\n", " final_answers.append(final_answer_json)\n", " # 
Print trace URL for traceability\n", " print(\"Langfuse Trace URL:\", langfuse_context.get_current_trace_url())\n", " return final_answers\n", "\n", "# Load GAIA documentation from file\n", "with open(\"documentation/GIAI-documentation.md\", \"r\") as f:\n", " gaia_doc = f.read()\n", "\n", "# Process all questions:\n", "final_answers = process_all_questions(questions, model_name, temperature, gaia_doc)" ] }, { "cell_type": "code", "execution_count": 90, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Saved 1 answers to all-json/final_answers.jsonl\n" ] } ], "source": [ "# Step 9: Save Results (with cleaning and validation)\n", "\n", "import json\n", "import re\n", "\n", "def extract_json_from_response(answer_str):\n", " \"\"\"\n", " Extracts the first JSON object found in the string.\n", " \"\"\"\n", " # Remove code block markers if present\n", " answer_str = answer_str.strip()\n", " answer_str = re.sub(r\"^```[a-zA-Z]*\", \"\", answer_str)\n", " answer_str = re.sub(r\"```$\", \"\", answer_str).strip()\n", " # Find the first JSON object in the string\n", " match = re.search(r\"\\{[\\s\\S]*\\}\", answer_str)\n", " if match:\n", " return match.group(0)\n", " else:\n", " raise ValueError(f\"No JSON object found in answer:\\n{answer_str}\")\n", "\n", "def clean_and_validate_answer(answer_str, required_fields=(\"task_id\", \"submitted_answer\"), correct_task_id=None):\n", " try:\n", " json_str = extract_json_from_response(answer_str)\n", " answer_obj = json.loads(json_str)\n", " except Exception as e:\n", " raise ValueError(f\"Invalid JSON: {e}\\nRaw output: {answer_str}\")\n", " if correct_task_id is not None:\n", " answer_obj[\"task_id\"] = correct_task_id\n", " for field in required_fields:\n", " if field not in answer_obj:\n", " raise ValueError(f\"Missing required field '{field}' in answer: {answer_obj}\")\n", " return answer_obj\n", "\n", "def save_final_answers(final_answers, questions, filename=\"all-json/final_answers.jsonl\"):\n", " cleaned_answers = []\n", " for i, answer in enumerate(final_answers):\n", " correct_task_id = questions[i][\"task_id\"]\n", " if isinstance(answer, str):\n", " try:\n", " answer_obj = clean_and_validate_answer(answer, correct_task_id=correct_task_id)\n", " except Exception as e:\n", " print(f\"Error in answer {i}: {e}\")\n", " continue\n", " else:\n", " answer_obj = answer\n", " answer_obj[\"task_id\"] = correct_task_id\n", " cleaned_answers.append(answer_obj)\n", " with open(filename, \"w\") as f:\n", " for answer_obj in cleaned_answers:\n", " f.write(json.dumps(answer_obj, ensure_ascii=False) + \"\\n\")\n", " print(f\"Saved {len(cleaned_answers)} answers to {filename}\")\n", " return cleaned_answers\n", "\n", "# Example usage:\n", "final_answers_cleaned = save_final_answers(final_answers, questions)" ] }, { "cell_type": "code", "execution_count": 91, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Submission successful!\n", "Score: 0.0%\n", "No detailed results returned.\n" ] } ], "source": [ "# Step 10: Validate Answers (Check with GAIA API)\n", "\n", "import requests\n", "\n", "def validate_answers(final_answers, username, code_link, api_base_url, agent_code=None):\n", " \"\"\"\n", " Submits answers to the GAIA evaluation endpoint for validation.\n", " Prints the score and which answers were correct.\n", " \"\"\"\n", " url = f\"{api_base_url}/submit\"\n", " if agent_code is None:\n", " # Try to read your notebook as code, or use code_link as fallback\n", " try:\n", " with 
open(\"agent.ipynb\", \"r\") as f:\n", " agent_code = f.read()\n", " except Exception:\n", " agent_code = code_link # fallback\n", " payload = {\n", " \"username\": username,\n", " \"code_link\": code_link,\n", " \"agent_code\": agent_code,\n", " \"answers\": final_answers # <-- Use the cleaned answers here!\n", " }\n", " response = requests.post(url, json=payload)\n", " if response.status_code == 200:\n", " result = response.json()\n", " print(\"Submission successful!\")\n", " print(f\"Score: {result.get('score', 'N/A')}%\")\n", " if \"results\" in result:\n", " print(\"\\nDetailed Results:\")\n", " for r in result[\"results\"]:\n", " status = \"✅\" if r.get(\"correct\") else \"❌\"\n", " print(f\"{status} Task ID: {r['task_id']} | Your Answer: {r['submitted_answer']} | Correct: {r.get('correct_answer', 'N/A')}\")\n", " else:\n", " print(\"No detailed results returned.\")\n", " return result\n", " else:\n", " print(\"Submission failed:\", response.status_code, response.text)\n", " return None\n", "\n", "# Example usage:\n", "validation_result = validate_answers(final_answers_cleaned, username, code_link, api_base_url)" ] }, { "cell_type": "code", "execution_count": 92, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Validation results saved to all-json/validation_results.json\n" ] } ], "source": [ "# Step 11: (Optional) Save Validation Results\n", "\n", "def save_validation_results(validation_result, filename=\"all-json/validation_results.json\"):\n", " if validation_result is not None:\n", " with open(filename, \"w\") as f:\n", " json.dump(validation_result, f, indent=2, ensure_ascii=False)\n", " print(f\"Validation results saved to {filename}\")\n", "\n", "# Example usage:\n", "save_validation_results(validation_result)" ] }, { "cell_type": "code", "execution_count": 93, "metadata": {}, "outputs": [], "source": [ "# Step 12: Save Validation Results (with cleaning and validation)\n", "\n", "# --- Langfuse flush at the end of the notebook ---\n", "from langfuse.decorators import langfuse_context # (if not already imported)\n", "langfuse_context.flush()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "ai_agents_course", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.9" } }, "nbformat": 4, "nbformat_minor": 2 }
ChillThrills
45
2025-05-13T14:17:03.943272+00:00
https://huggingface.co/spaces/ChillThrills/Final_Assignment_Template/tree/main
khanhnguyen4999
30
2025-05-13T14:19:33.418063+00:00
https://huggingface.co/spaces/khanhnguyen4999/Final_Assignment_Template/tree/main