# crewAI/tests/cassettes/test_output_pydantic_to_another_task.yaml
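# VCR-style cassette of recorded OpenAI HTTP interactions, replayed when running
# test_output_pydantic_to_another_task. The flow below (inferred from the recorded
# bodies and the test name, so treat the framing as an assumption): a Scorer agent
# scores a title, the raw answer is converted into a ScoreOutput pydantic model via
# a forced function call, and that score is passed as context into a second scoring
# task whose answer is converted the same way.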
interactions:
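# Interaction 1: streamed gpt-4-0125-preview completion. The Scorer agent is asked
# for an integer score (1-5) for the title 'The impact of AI in the future of work';
# the recorded stream ends with "Final Answer: 4".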
- request:
body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
final answer to the task use the exact following format:\n\nThought: I now can
give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
final answer must be the great and the most complete as possible, it must be
outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
Task: Give me an integer score between 1-5 for the following title: ''The impact
of AI in the future of work''\n\nThis is the expect criteria for your final
answer: The score of the title. \n you MUST return the actual complete content
as the final answer, not a summary.\n\nBegin! This is VERY important to you,
use the tools available and give your best Final Answer, your job depends on
it!\n\nThought:\n", "role": "user"}], "model": "gpt-4-0125-preview", "n": 1,
"stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1009'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.40.3
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.40.3
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
now"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
can"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
great"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqUPD3teE3tejHXmxLbDoeKi7Kq","object":"chat.completion.chunk","created":1723348066,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8b153f061c431aca-GRU
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Sun, 11 Aug 2024 03:47:47 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=1uYub9Bs6z1MewG3zBUU6Ei4I1ErHrDYihXrFKWStSg-1723348067-1.0.1.1-SzDOHRxBt_RqSTo0q9Aoqm7sybN_gBq3Fsg.jzn8jdOEBe0uwi6wiUzCI0XU7IuJSf9qXXbYXar.QxFpdi6loQ;
path=/; expires=Sun, 11-Aug-24 04:17:47 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=iWmVnMB0AFb9kmsUZjhYDk17hiSVqx5HZg_io8AMsf8-1723348067193-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '535'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '2000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '1999771'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 6ms
x-request-id:
- req_e9bc644b58b7fddeb31f475967f6fa38
status:
code: 200
message: OK
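# Interaction 2: gpt-3.5-turbo-0125 call that forces the ScoreOutput function via
# tool_choice, converting the raw answer "4" into the ScoreOutput pydantic model.
# The response body is stored as gzip-compressed binary, exactly as received.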
- request:
body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
"I''m gonna convert this raw text into valid JSON."}], "model": "gpt-3.5-turbo-0125",
"tool_choice": {"type": "function", "function": {"name": "ScoreOutput"}}, "tools":
[{"type": "function", "function": {"name": "ScoreOutput", "description": "Correctly
extracted `ScoreOutput` with all the required parameters with correct types",
"parameters": {"properties": {"score": {"title": "Score", "type": "integer"}},
"required": ["score"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '531'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.40.3
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.40.3
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAAwAAAP//bFLLbtswELzrK4g924EdxZGqWxAULYo+4iaID00gUPRKVk2RDLlEahj+
94CSIilGeSCInZ2d4ZDHiDGot5AxEDtOojFy/snTy+bn530SI/3ePN5er9L1/tv35nD3dY0wCwxd
/EVB76wLoRsjkWqtOlhY5IRh6jK5jOOrdHGdtkCjtygDrTI0jy9Wc/K20PPF8nLVM3e6FuggY38i
xhg7tnvwqLb4DzK2mL1XGnSOVwjZ0MQYWC1DBbhztSOuCGYjKLQiVMG28lJOANJa5oJLOQp36zg5
j0FxKXN1/2K3yUOy+XF7s/ZFuXstll9ksp7odaMPpjVUeiWGgCb4UM/OxBgDxZuWey+0xV+ejKcz
OmPAbeUbVBSsw/EJXGh+guzqBB9aT9H/zs+TFCyW3nHZx9PXT0PeUlfG6sKdxQdlrWq3yy1y114D
HGnTaQedVgH8h6cCY3VjKCe9RxUGpnE3DsafNIKrHiNNXE44adTbA3dwhE1e1qpCa2w9vHF0it4A
AAD//wMAgqJSXeICAAA=
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8b153f10af78621e-GRU
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Sun, 11 Aug 2024 03:47:48 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=_DKjTo3CcQPIdjfvLYD2V6klcVp1TYtSVmCNtKdebgA-1723348068-1.0.1.1-Zm_L8BsfW2x7nO3iIbL_n6qFwIknqiMEBDNkUOTUykXvIU798WtYjFi1ez9Xf4c4JJtwCWrY3saJW2Se5QnbRg;
path=/; expires=Sun, 11-Aug-24 04:17:48 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=oz1iz7yvpq1pVkBGg2rNM1dP1ix7buS1rizpcTA0C5k-1723348068734-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '447'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '50000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '49999968'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_52470430a27c80a936c16a9a066bdf21
status:
code: 200
message: OK
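# Interaction 3: streamed gpt-4-0125-preview completion for the second task. The
# previous score ("4") is passed in as context and the agent is asked to score
# 'Return of the Jedi'; the recorded stream ends with "Final Answer: 5".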
- request:
body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
final answer to the task use the exact following format:\n\nThought: I now can
give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
final answer must be the great and the most complete as possible, it must be
outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
Task: Given the score the title ''The impact of AI in the future of work'' got,
give me an integer score between 1-5 for the following title: ''Return of the
Jedi'', you MUST give it a score, use your best judgment\n\nThis is the expect
criteria for your final answer: The score of the title. \n you MUST return the
actual complete content as the final answer, not a summary.\n\nThis is the context
you''re working with:\n4\n\nBegin! This is VERY important to you, use the tools
available and give your best Final Answer, your job depends on it!\n\nThought:\n",
"role": "user"}], "model": "gpt-4-0125-preview", "n": 1, "stop": ["\nObservation"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1158'
content-type:
- application/json
cookie:
- __cf_bm=1uYub9Bs6z1MewG3zBUU6Ei4I1ErHrDYihXrFKWStSg-1723348067-1.0.1.1-SzDOHRxBt_RqSTo0q9Aoqm7sybN_gBq3Fsg.jzn8jdOEBe0uwi6wiUzCI0XU7IuJSf9qXXbYXar.QxFpdi6loQ;
_cfuvid=iWmVnMB0AFb9kmsUZjhYDk17hiSVqx5HZg_io8AMsf8-1723348067193-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.40.3
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.40.3
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"","refusal":null},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
now"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
can"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
great"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"5"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9utqXllY3K6aEK6WUpU8V6rsCNorz","object":"chat.completion.chunk","created":1723348069,"model":"gpt-4-0125-preview","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8b153f161e2a1aca-GRU
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Sun, 11 Aug 2024 03:47:49 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '494'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '2000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '1999735'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 7ms
x-request-id:
- req_16dd29893234ec4d382f227741452b5a
status:
code: 200
message: OK
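# Interaction 4: gpt-3.5-turbo-0125 call converting the raw answer "5" into the
# ScoreOutput pydantic model, again via a forced ScoreOutput tool call. The
# response body is stored as gzip-compressed binary.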
- request:
body: '{"messages": [{"role": "user", "content": "5"}, {"role": "system", "content":
"I''m gonna convert this raw text into valid JSON."}], "model": "gpt-3.5-turbo-0125",
"tool_choice": {"type": "function", "function": {"name": "ScoreOutput"}}, "tools":
[{"type": "function", "function": {"name": "ScoreOutput", "description": "Correctly
extracted `ScoreOutput` with all the required parameters with correct types",
"parameters": {"properties": {"score": {"title": "Score", "type": "integer"}},
"required": ["score"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '531'
content-type:
- application/json
cookie:
- __cf_bm=_DKjTo3CcQPIdjfvLYD2V6klcVp1TYtSVmCNtKdebgA-1723348068-1.0.1.1-Zm_L8BsfW2x7nO3iIbL_n6qFwIknqiMEBDNkUOTUykXvIU798WtYjFi1ez9Xf4c4JJtwCWrY3saJW2Se5QnbRg;
_cfuvid=oz1iz7yvpq1pVkBGg2rNM1dP1ix7buS1rizpcTA0C5k-1723348068734-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.40.3
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.40.3
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAA2xSXU/bMBR9z6+w7jNFpV3XNm8MCTEhMRAbUzVQ5DpOarB9jX3Nhqr+98lJloRq
eYise+75yHH2GWOgSsgZiB0nYZyerCO9br5fKLe7fb26qTa/1/y2/EGrzV25/AoniYHbZynoH+tU
oHFakkLbwsJLTjKpni1n8/mn1XQ5bQCDpdSJVjuazE8XE4p+i5Pp2WzRMXeohAyQs18ZY4ztm3fK
aEv5B3LW6DQTI0PgtYS8X2IMPOo0AR6CCsQtwckACrQkbYpto9YjgBB1IbjWg3H77EfnoSiudXEx
fXNCnH8x9u2zmV3x9cvN5c/ry4eRXyv97ppAVbSiL2iE9/P8yIwxsNw03HuBXn6L5CId0RkD7uto
pKUUHfaPENLyI+SLA3xYPWT/Oz+NWvCyioHrrp5ufuj71lg7j9twVB9UyqqwK7zkofkMCISu9U4+
jQPED1cFzqNxVBC+SJsEV/NWDoY/aQAXHUZIXI84q6yLB+E9kDRFpWwtvfOqv+PskP0FAAD//wMA
wdyAb+ICAAA=
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8b153f1e28eb621e-GRU
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Sun, 11 Aug 2024 03:47:50 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '117'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '50000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '49999969'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_b44dd735a40d4c525a45418a54496e6e
status:
code: 200
message: OK
version: 1
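# A minimal sketch (an assumption, not part of the recorded data) of how a cassette
# like this is typically replayed with vcrpy/pytest-recording; the decorator usage
# and test wiring below are illustrative only:
#
#   import pytest
#
#   @pytest.mark.vcr()  # replays tests/cassettes/<test_name>.yaml instead of hitting the API
#   def test_output_pydantic_to_another_task():
#       crew = Crew(agents=[scorer], tasks=[score_title_task, score_jedi_task])  # hypothetical wiring
#       result = crew.kickoff()
#       assert result.pydantic.score == 5  # matches the final recorded ScoreOutput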