# crewAI/tests/cassettes/test_crew_kickoff_usage_metrics.yaml
interactions:
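# Recorded HTTP interactions for the crew kickoff usage-metrics test: three
# chat-completion calls (the "dog", "cat", and "apple" Researcher agents) plus
# one telemetry export. The cassette is presumably replayed by a VCR-style
# fixture so the test runs offline instead of hitting api.openai.com; the exact
# recording plugin is an assumption inferred from the cassette structure.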
- request:
body: '{"messages": [{"role": "system", "content": "You are dog Researcher. You
have a lot of experience with dog.\nYour personal goal is: Express hot takes
on dog.\nTo give my best complete final answer to the task use the exact following
format:\n\nThought: I now can give a great answer\nFinal Answer: Your final
answer must be the great and the most complete as possible, it must be outcome
described.\n\nI MUST use these formats, my job depends on it!"}, {"role": "user",
"content": "\nCurrent Task: Give me an analysis around dog.\n\nThis is the expect
criteria for your final answer: 1 bullet point about dog that''s under 15 words.\nyou
MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
This is VERY important to you, use the tools available and give your best Final
Answer, your job depends on it!\n\nThought:"}], "model": "gpt-4o", "stop": ["\nObservation"]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '896'
content-type:
- application/json
cookie:
- __cf_bm=r6BNUnXat.26mviZiJYUf7Qj4ea5gbt_oPxyClKcVOE-1726214380-1.0.1.1-2S3G10sPdiG2syQSO8wKWXE7Nmdgt88imgakr81w9DemMH8mVDg0q6r9ANx6tM4_CPvgGQlMVstDhDRkIDwhzg;
_cfuvid=dp2yoCmGHVlURh_U78_UoIfnhcO5UhufnL5whkUSMTQ-1726214380689-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.45.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.45.0
x-stainless-raw-response:
- 'true'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
content: "{\n \"id\": \"chatcmpl-A6vjDzls8MRpmSVHaQQ76tHXn7NcE\",\n \"object\":
\"chat.completion\",\n \"created\": 1726215239,\n \"model\": \"gpt-4o-2024-05-13\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"Thought: I now can give a great answer\\nFinal
Answer: Dogs offer unmatched loyalty and companionship to humans.\",\n \"refusal\":
null\n },\n \"logprobs\": null,\n \"finish_reason\": \"stop\"\n
\ }\n ],\n \"usage\": {\n \"prompt_tokens\": 175,\n \"completion_tokens\":
22,\n \"total_tokens\": 197,\n \"completion_tokens_details\": {\n \"reasoning_tokens\":
0\n }\n },\n \"system_fingerprint\": \"fp_25624ae3a5\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8c26ae5c9d2e2ab9-LAX
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Fri, 13 Sep 2024 08:13:59 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '354'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999792'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_0ebb641136a1fdef074ea8b34a7b0456
http_version: HTTP/1.1
status_code: 200
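# Second recorded interaction: the same prompt template, this time for the
# "cat Researcher" agent.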
- request:
body: '{"messages": [{"role": "system", "content": "You are cat Researcher. You
have a lot of experience with cat.\nYour personal goal is: Express hot takes
on cat.\nTo give my best complete final answer to the task use the exact following
format:\n\nThought: I now can give a great answer\nFinal Answer: Your final
answer must be the great and the most complete as possible, it must be outcome
described.\n\nI MUST use these formats, my job depends on it!"}, {"role": "user",
"content": "\nCurrent Task: Give me an analysis around cat.\n\nThis is the expect
criteria for your final answer: 1 bullet point about cat that''s under 15 words.\nyou
MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
This is VERY important to you, use the tools available and give your best Final
Answer, your job depends on it!\n\nThought:"}], "model": "gpt-4o", "stop": ["\nObservation"]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '896'
content-type:
- application/json
cookie:
- __cf_bm=r6BNUnXat.26mviZiJYUf7Qj4ea5gbt_oPxyClKcVOE-1726214380-1.0.1.1-2S3G10sPdiG2syQSO8wKWXE7Nmdgt88imgakr81w9DemMH8mVDg0q6r9ANx6tM4_CPvgGQlMVstDhDRkIDwhzg;
_cfuvid=dp2yoCmGHVlURh_U78_UoIfnhcO5UhufnL5whkUSMTQ-1726214380689-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.45.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.45.0
x-stainless-raw-response:
- 'true'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
content: "{\n \"id\": \"chatcmpl-A6vjEBG9zuYa8NHm2snsKrT0lVU4G\",\n \"object\":
\"chat.completion\",\n \"created\": 1726215240,\n \"model\": \"gpt-4o-2024-05-13\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"Thought: I now can give a great answer.\\nFinal
Answer: Cats are independent yet affectionate, making them both mysterious and
loving companions.\",\n \"refusal\": null\n },\n \"logprobs\":
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
175,\n \"completion_tokens\": 27,\n \"total_tokens\": 202,\n \"completion_tokens_details\":
{\n \"reasoning_tokens\": 0\n }\n },\n \"system_fingerprint\": \"fp_25624ae3a5\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8c26ae64ecf82ab9-LAX
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Fri, 13 Sep 2024 08:14:01 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '419'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999792'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_ebcc2021020499348934a9488febda45
http_version: HTTP/1.1
status_code: 200
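# Third recorded interaction: an OTLP trace export to CrewAI's telemetry
# endpoint. The body is base64-encoded protobuf emitted by the
# OTel-OTLP-Exporter-Python client (see the Content-Type and User-Agent
# headers below).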
- request:
body: !!binary |
CqoNCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkSgQ0KEgoQY3Jld2FpLnRl
bGVtZXRyeRKTAQoQt5xgPiroSTZnbmm8j71S6hIIj/x0cldfUMYqClRvb2wgVXNhZ2UwATkI4sUj
nL/0F0EY68ojnL/0F0oaCg5jcmV3YWlfdmVyc2lvbhIICgYwLjU2LjBKHwoJdG9vbF9uYW1lEhIK
EGdldF9maW5hbF9hbnN3ZXJKDgoIYXR0ZW1wdHMSAhgBegIYAYUBAAEAABKQAgoQmHaQZn62E5aQ
Z8zXk4JYGBIISrDCmPRrWDYqDlRhc2sgRXhlY3V0aW9uMAE5eOg61Zq/9BdB6LoOs5y/9BdKLgoI
Y3Jld19rZXkSIgogNGY3ODRjNDE0MTEzMGVhYWRjYjY5ODUyZjkyMjI4NzNKMQoHY3Jld19pZBIm
CiQxOTlkMWQ1My0yNjU1LTQ4YjgtYjAxMC0yZjdhMTJhMGNkM2RKLgoIdGFza19rZXkSIgogODkz
MmU4YjVkNzI5NWE0MzNmZDMzZjYxMmZmMDdkNjBKMQoHdGFza19pZBImCiRhY2Y3ZTkzYy1iOTlh
LTQxOTQtYTJhMC0wY2Y1OWYzYTYwYzZ6AhgBhQEAAQAAErAHChBBl375wFuX/iFPHlHrhVqIEgho
ln1zZqxHHyoMQ3JldyBDcmVhdGVkMAE50BjEvJy/9BdBQKfFvJy/9BdKGgoOY3Jld2FpX3ZlcnNp
b24SCAoGMC41Ni4wShoKDnB5dGhvbl92ZXJzaW9uEggKBjMuMTEuN0ouCghjcmV3X2tleRIiCiBl
ZTY3NDVkN2M4YWU4MmUwMGRmOTRkZTBmN2Y4NzExOEoxCgdjcmV3X2lkEiYKJDBlMGM0YWRjLTM3
NGItNGE2Zi04NTQ1LTZmYjc0OTQ1YWNhZEocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoR
CgtjcmV3X21lbW9yeRICEABKGgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVt
YmVyX29mX2FnZW50cxICGAFK1gIKC2NyZXdfYWdlbnRzEsYCCsMCW3sia2V5IjogImYzMzg2ZjZk
OGRhNzVhYTQxNmE2ZTMxMDA1M2Y3Njk4IiwgImlkIjogImZhZjgwMWY5LTMxMTItNDE3NC04NmRi
LTM1MGQ2NTMwMzQ2YyIsICJyb2xlIjogInt0b3BpY30gUmVzZWFyY2hlciIsICJ2ZXJib3NlPyI6
IGZhbHNlLCAibWF4X2l0ZXIiOiAyNSwgIm1heF9ycG0iOiBudWxsLCAiZnVuY3Rpb25fY2FsbGlu
Z19sbG0iOiBudWxsLCAibGxtIjogImdwdC00byIsICJkZWxlZ2F0aW9uX2VuYWJsZWQ/IjogZmFs
c2UsICJhbGxvd19jb2RlX2V4ZWN1dGlvbj8iOiBmYWxzZSwgIm1heF9yZXRyeV9saW1pdCI6IDIs
ICJ0b29sc19uYW1lcyI6IFtdfV1KhwIKCmNyZXdfdGFza3MS+AEK9QFbeyJrZXkiOiAiMDZhNzMy
MjBmNDE0OGE0YmJkNWJhY2IwZDBiNDRmY2UiLCAiaWQiOiAiZmM2NmY2MDktMjBhYi00MWNiLWI0
MWEtNTgxM2I1MzNlNDY0IiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBmYWxzZSwgImh1bWFuX2lucHV0
PyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJ7dG9waWN9IFJlc2VhcmNoZXIiLCAiYWdlbnRfa2V5
IjogImYzMzg2ZjZkOGRhNzVhYTQxNmE2ZTMxMDA1M2Y3Njk4IiwgInRvb2xzX25hbWVzIjogW119
XXoCGAGFAQABAAASjgIKEPZ3SzT7O1uH2nhi/TWwrVESCHu5/uaSgq33KgxUYXNrIENyZWF0ZWQw
ATlY5M+8nL/0F0GQLtC8nL/0F0ouCghjcmV3X2tleRIiCiBkMGZlZTY5MzIzOTU4ODZmMjAzZjQ0
NmI3MmMxYjAwYUoxCgdjcmV3X2lkEiYKJDBlMGM0YWRjLTM3NGItNGE2Zi04NTQ1LTZmYjc0OTQ1
YWNhZEouCgh0YXNrX2tleRIiCiAwNmE3MzIyMGY0MTQ4YTRiYmQ1YmFjYjBkMGI0NGZjZUoxCgd0
YXNrX2lkEiYKJGZjNjZmNjA5LTIwYWItNDFjYi1iNDFhLTU4MTNiNTMzZTQ2NHoCGAGFAQABAAA=
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '1709'
Content-Type:
- application/x-protobuf
User-Agent:
- OTel-OTLP-Exporter-Python/1.27.0
method: POST
uri: https://telemetry.crewai.com:4319/v1/traces
response:
body:
string: "\n\0"
headers:
Content-Length:
- '2'
Content-Type:
- application/x-protobuf
Date:
- Fri, 13 Sep 2024 08:14:01 GMT
status:
code: 200
message: OK
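# Fourth recorded interaction: the same prompt template for the
# "apple Researcher" agent.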
- request:
body: '{"messages": [{"role": "system", "content": "You are apple Researcher.
You have a lot of experience with apple.\nYour personal goal is: Express hot
takes on apple.\nTo give my best complete final answer to the task use the exact
following format:\n\nThought: I now can give a great answer\nFinal Answer: Your
final answer must be the great and the most complete as possible, it must be
outcome described.\n\nI MUST use these formats, my job depends on it!"}, {"role":
"user", "content": "\nCurrent Task: Give me an analysis around apple.\n\nThis
is the expect criteria for your final answer: 1 bullet point about apple that''s
under 15 words.\nyou MUST return the actual complete content as the final answer,
not a summary.\n\nBegin! This is VERY important to you, use the tools available
and give your best Final Answer, your job depends on it!\n\nThought:"}], "model":
"gpt-4o", "stop": ["\nObservation"]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '906'
content-type:
- application/json
cookie:
- __cf_bm=r6BNUnXat.26mviZiJYUf7Qj4ea5gbt_oPxyClKcVOE-1726214380-1.0.1.1-2S3G10sPdiG2syQSO8wKWXE7Nmdgt88imgakr81w9DemMH8mVDg0q6r9ANx6tM4_CPvgGQlMVstDhDRkIDwhzg;
_cfuvid=dp2yoCmGHVlURh_U78_UoIfnhcO5UhufnL5whkUSMTQ-1726214380689-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.45.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.45.0
x-stainless-raw-response:
- 'true'
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
content: "{\n \"id\": \"chatcmpl-A6vjGnJw6ZLJxsj7NK9SOTZDPsleO\",\n \"object\":
\"chat.completion\",\n \"created\": 1726215242,\n \"model\": \"gpt-4o-2024-05-13\",\n
\ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
\"assistant\",\n \"content\": \"Thought: I now can give a great answer\\nFinal
Answer: Apple consistently revolutionizes consumer tech with design, performance,
and ecosystem integration.\",\n \"refusal\": null\n },\n \"logprobs\":
null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
175,\n \"completion_tokens\": 28,\n \"total_tokens\": 203,\n \"completion_tokens_details\":
{\n \"reasoning_tokens\": 0\n }\n },\n \"system_fingerprint\": \"fp_25624ae3a5\"\n}\n"
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8c26ae6e6d592ab9-LAX
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Fri, 13 Sep 2024 08:14:04 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
access-control-expose-headers:
- X-Request-ID
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '2101'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999791'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_b7b21104c1b4fef5af304952b4202697
http_version: HTTP/1.1
status_code: 200
version: 1
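# Recorded usage across the three completions (values taken from the responses
# above): prompt_tokens 175 + 175 + 175 = 525, completion_tokens
# 22 + 27 + 28 = 77, total_tokens 197 + 202 + 203 = 602. The kickoff
# usage-metrics test presumably asserts against these aggregated values.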