interactions:
- request:
    body: '{"messages": [{"role": "system", "content": "You are Scorer. You''re an expert scorer, specialized in scoring titles.\nYour personal goal is: Score the title\nTo give my best complete final answer to the task use the exact following format:\n\nThought: I now can give a great answer\nFinal Answer: Your final answer must be the great and the most complete as possible, it must be outcome described.\n\nI MUST use these formats, my job depends on it!"}, {"role": "user", "content": "\nCurrent Task: Give me an integer score between 1-5 for the following title: ''The impact of AI in the future of work''\n\nThis is the expect criteria for your final answer: The score of the title.\nyou MUST return the actual complete content as the final answer, not a summary.\n\nBegin! This is VERY important to you, use the tools available and give your best Final Answer, your job depends on it!\n\nThought:"}], "model": "gpt-4o", "stop": ["\nObservation:"]}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '943'
      content-type:
      - application/json
      cookie:
      - __cf_bm=1SckBhvJ18Dazp6bi8DEKYeiS9Q4.6_6i3nmLBw9b6g-1726476036-1.0.1.1-TnN4UpDXA33YXCVCUWOaZ12vGIg_o5NpJQEUHgjn6XdUgb7M0ND8PdkTfkd8rrxG5XFlPRMzI54GxZ0FeUY9xw; _cfuvid=0Rs4xTPk7h7OIXuSbTgMVVD9JSoZeKMwnygKHoHQo3k-1726476036297-0.0.1.1-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.45.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.45.0
      x-stainless-raw-response:
      - 'true'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    content: "{\n \"id\": \"chatcmpl-A81kb4GGgul5CYnrGzyUeSf31w5hE\",\n \"object\": \"chat.completion\",\n \"created\": 1726476717,\n \"model\": \"gpt-4o-2024-05-13\",\n \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": \"assistant\",\n \"content\": \"Thought: I now can give a great answer\\nFinal Answer: 4\",\n \"refusal\": null\n },\n \"logprobs\": null,\n \ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 186,\n \"completion_tokens\": 15,\n \"total_tokens\": 201,\n \"completion_tokens_details\": {\n \"reasoning_tokens\": 0\n }\n },\n \"system_fingerprint\": \"fp_25624ae3a5\"\n}\n"
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8c3f9e1b1e5e2233-MIA
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Mon, 16 Sep 2024 08:51:57 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '269'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15552000; includeSubDomains; preload
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '30000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '29999781'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_833c97f3f864f15acbb41232563bf073
    http_version: HTTP/1.1
    status_code: 200
- request:
    body: !!binary |
      CogLCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkS3woKEgoQY3Jld2FpLnRl
      bGVtZXRyeRKcAQoQ7d1A+XCffF5dRL2JYLQQjBIIILdxFxk6t60qClRvb2wgVXNhZ2UwATl4OzrW
      bK31F0FIPFDWbK31F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjU2LjNKKAoJdG9vbF9uYW1lEhsK
      GURlbGVnYXRlIHdvcmsgdG8gY293b3JrZXJKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEAABKYBwoQ
      CmREBx+AN5GB6MrBvKoTIhIIup++pbvF2+MqDENyZXcgQ3JlYXRlZDABOZCypkhtrfUXQWhlrUht
      rfUXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuNTYuM0oaCg5weXRob25fdmVyc2lvbhIICgYzLjEx
      LjdKLgoIY3Jld19rZXkSIgogNWU2ZWZmZTY4MGE1ZDk3ZGMzODczYjE0ODI1Y2NmYTNKMQoHY3Jl
      d19pZBImCiQ4MDI0NDVhYS1lZGVmLTRjOGItYmQ3OS01Y2E4OTFjYzdiOGZKHAoMY3Jld19wcm9j
      ZXNzEgwKCnNlcXVlbnRpYWxKEQoLY3Jld19tZW1vcnkSAhAAShoKFGNyZXdfbnVtYmVyX29mX3Rh
      c2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2VudHMSAhgBSsoCCgtjcmV3X2FnZW50cxK6Agq3
      Alt7ImtleSI6ICI5MmU3ZWIxOTE2NjRjOTM1Nzg1ZWQ3ZDQyNDBhMjk0ZCIsICJpZCI6ICJlZTlj
      YjQyNS0zMDZkLTQzODAtYTY2MC1jMDZhODNjYTI1OGIiLCAicm9sZSI6ICJTY29yZXIiLCAidmVy
      Ym9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMTUsICJtYXhfcnBtIjogbnVsbCwgImZ1bmN0aW9u
      X2NhbGxpbmdfbGxtIjogbnVsbCwgImxsbSI6ICJncHQtNG8iLCAiZGVsZWdhdGlvbl9lbmFibGVk
      PyI6IGZhbHNlLCAiYWxsb3dfY29kZV9leGVjdXRpb24/IjogZmFsc2UsICJtYXhfcmV0cnlfbGlt
      aXQiOiAyLCAidG9vbHNfbmFtZXMiOiBbXX1dSvsBCgpjcmV3X3Rhc2tzEuwBCukBW3sia2V5Ijog
      IjI3ZWYzOGNjOTlkYTRhOGRlZDcwZWQ0MDZlNDRhYjg2IiwgImlkIjogIjgzNTA2NGQyLTQyYTgt
      NGJmOS1hZmY3LThhOTg1YjQ0MTc1MCIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJodW1h
      bl9pbnB1dD8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiU2NvcmVyIiwgImFnZW50X2tleSI6ICI5
      MmU3ZWIxOTE2NjRjOTM1Nzg1ZWQ3ZDQyNDBhMjk0ZCIsICJ0b29sc19uYW1lcyI6IFtdfV16AhgB
      hQEAAQAAEo4CChCrZK/rfR6/fdUNukoLrKPgEgh6pzZNa99uRioMVGFzayBDcmVhdGVkMAE5eALE
      SG2t9RdB0JrESG2t9RdKLgoIY3Jld19rZXkSIgogNWU2ZWZmZTY4MGE1ZDk3ZGMzODczYjE0ODI1
      Y2NmYTNKMQoHY3Jld19pZBImCiQ4MDI0NDVhYS1lZGVmLTRjOGItYmQ3OS01Y2E4OTFjYzdiOGZK
      LgoIdGFza19rZXkSIgogMjdlZjM4Y2M5OWRhNGE4ZGVkNzBlZDQwNmU0NGFiODZKMQoHdGFza19p
      ZBImCiQ4MzUwNjRkMi00MmE4LTRiZjktYWZmNy04YTk4NWI0NDE3NTB6AhgBhQEAAQAA
    headers:
      Accept:
      - '*/*'
      Accept-Encoding:
      - gzip, deflate
      Connection:
      - keep-alive
      Content-Length:
      - '1419'
      Content-Type:
      - application/x-protobuf
      User-Agent:
      - OTel-OTLP-Exporter-Python/1.27.0
    method: POST
    uri: https://telemetry.crewai.com:4319/v1/traces
  response:
    body:
      string: "\n\0"
    headers:
      Content-Length:
      - '2'
      Content-Type:
      - application/x-protobuf
      Date:
      - Mon, 16 Sep 2024 08:51:57 GMT
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content": "I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4o", "tool_choice": {"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type": "function", "function": {"name": "ScoreOutput", "description": "Correctly extracted `ScoreOutput` with all the required parameters with correct types", "parameters": {"properties": {"score": {"title": "Score", "type": "integer"}}, "required": ["score"], "type": "object"}}}]}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '519'
      content-type:
      - application/json
      cookie:
      - __cf_bm=04EJuO0mh2HU6FxLd2CpOc6Z6MMpyxhMCz_t6onOpaY-1726476704-1.0.1.1-BHP4W7X58z8ba4ns2wYBcPx53or5JKBDisMn8i7dQluTmjuN201bym7OqEkZrmUlhr0n3oODNJf0SS5BVipRKg; _cfuvid=JBsdqQQtLEVtIStEIeDR0TRIr0GMGhWKAyum6suoH.4-1726476704561-0.0.1.1-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.45.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.45.0
      x-stainless-raw-response:
      - 'true'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    content: "{\n \"id\": \"chatcmpl-A81kcAzrzm5uqvdnBLdy0tpUnafaE\",\n \"object\": \"chat.completion\",\n \"created\": 1726476718,\n \"model\": \"gpt-4o-2024-05-13\",\n \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\": \"assistant\",\n \"content\": null,\n \"tool_calls\": [\n {\n \ \"id\": \"call_85HWnSWdkPJzFSJTRjRN4dZf\",\n \"type\": \"function\",\n \"function\": {\n \"name\": \"ScoreOutput\",\n \ \"arguments\": \"{\\\"score\\\":4}\"\n }\n }\n \ ],\n \"refusal\": null\n },\n \"logprobs\": null,\n \ \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 80,\n \"completion_tokens\": 5,\n \"total_tokens\": 85,\n \"completion_tokens_details\": {\n \"reasoning_tokens\": 0\n }\n },\n \"system_fingerprint\": \"fp_25624ae3a5\"\n}\n"
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8c3f9e1eaa857494-MIA
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Mon, 16 Sep 2024 08:51:58 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      openai-organization:
      - crewai-iuxna1
      openai-processing-ms:
      - '136'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15552000; includeSubDomains; preload
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '30000000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '29999968'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_59296861d2f237130ac16e74b986b074
    http_version: HTTP/1.1
    status_code: 200
version: 1