# crewAI/tests/cassettes/test_agent_moved_on_after_max_iterations.yaml
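# Recorded OpenAI chat-completion traffic replayed during the
# test_agent_moved_on_after_max_iterations test. The layout (a top-level
# `interactions:` list plus a trailing `version: 1`) matches the vcrpy
# cassette format: each entry pairs one captured request with its captured
# response. Streaming responses are stored as server-sent-event text;
# non-streaming responses are stored as gzip-compressed binary blobs and are
# replayed verbatim. The comments before each entry summarize what was
# recorded.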
interactions:
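# Interaction 1: the agent's ReAct-style prompt for the "final answer is 42"
# task, with get_final_answer as the only available tool. The streamed
# completion decides to use the tool:
#   Thought: Do I need to use a tool? Yes
#   Action: get_final_answer
#   Action Input: 42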
- request:
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
tools:\n\n(\"get_final_answer: get_final_answer(numbers) -> float - Get the
final answer but don''t give it yet, just re-use this\\n tool non-stop.\",)\n\nTo
use a tool, please use the exact following format:\n\n```\nThought: Do I need
to use a tool? Yes\nAction: the tool you wanna use, should be one of [get_final_answer],
just the name.\nAction Input: Any and all relevant information input and context
for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
have a response for your task, or if you do not need to use a tool, you MUST
use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
[your response here]```This is the summary of your work so far:\nBegin! This
is VERY important to you, your job depends on it!\n\nCurrent Task: The final
answer is 42. But don''t give it yet, instead keep using the `get_final_answer`
tool. Until you''re told you could give my final answer if I''m ready.\n"}],
"model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream": true, "temperature":
0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1207'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Do"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
need"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
use"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Yes"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZTbAFt0BcHwp0sgbKC6bC23h2U","object":"chat.completion.chunk","created":1707817251,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 854c1cba9cd99450-SJC
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 13 Feb 2024 09:40:51 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=Ut7.Mf8nVlzF1mvnebp8z87PJWGdMJgpvIC3DniTzm0-1707817251-1-ARgaU3vRuSzlOdxfjwpTMGQz9fGZJuu9H9QhwNXFnW8inmWAf9rgWn+vUdAdOAH+OMmxEkE7k8c1QVH5A4i/onE=;
path=/; expires=Tue, 13-Feb-24 10:10:51 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=g.c9TwTQPsU2YZIqjeD2.nLBDZiyErteYcViVmTFshY-1707817251436-0-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- user-z7g4wmlazxqvc5wjyaaaocfz
openai-processing-ms:
- '285'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299721'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 55ms
x-request-id:
- req_5c80e8f3f6118f2f74046e60673d288e
status:
code: 200
message: OK
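# Interaction 2: a function-calling request that asks the model to restate the
# tool invocation as structured JSON via the `InstructorToolCalling` function
# schema (tool_name plus an arguments dict). The response body is a
# gzip-compressed binary blob (see Content-Encoding: gzip), so its text is not
# directly readable in this file.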
- request:
body: '{"messages": [{"role": "system", "content": "\n The
schema should have the following structure, only two key:\n -
tool_name: str\n - arguments: dict (with all
arguments being passed)\n\n Example:\n {\"tool_name\":
\"tool_name\", \"arguments\": {\"arg_name1\": \"value\", \"arg_name2\": 2}}\n "},
{"role": "user", "content": "Tools available:\n\nTool Name: get_final_answer\nTool
Description: get_final_answer(numbers) -> float - Get the final answer but don''t
give it yet, just re-use this\n tool non-stop.\nTool Arguments: {''numbers'':
{}}\n\nReturn a valid schema for the tool, use this text to inform a valid ouput
schema:\n\nTool Name: get_final_answer\nTool Arguments: 42```"}], "model": "gpt-4",
"function_call": {"name": "InstructorToolCalling"}, "functions": [{"name": "InstructorToolCalling",
"description": "Correctly extracted `InstructorToolCalling` with all the required
parameters with correct types", "parameters": {"properties": {"tool_name": {"description":
"The name of the tool to be called.", "title": "Tool Name", "type": "string"},
"arguments": {"description": "A dictinary of arguments to be passed to the tool.",
"title": "Arguments", "type": "object"}}, "required": ["arguments", "tool_name"],
"type": "object"}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1427'
content-type:
- application/json
cookie:
- __cf_bm=Ut7.Mf8nVlzF1mvnebp8z87PJWGdMJgpvIC3DniTzm0-1707817251-1-ARgaU3vRuSzlOdxfjwpTMGQz9fGZJuu9H9QhwNXFnW8inmWAf9rgWn+vUdAdOAH+OMmxEkE7k8c1QVH5A4i/onE=;
_cfuvid=g.c9TwTQPsU2YZIqjeD2.nLBDZiyErteYcViVmTFshY-1707817251436-0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAA1yRwW7bMBBE7/oKYs92YCtxbOiW5pRL0YMLFIkKgaZXElNyVyBXaAJD/x5QUlSn
F4KY2XkaLS+ZUmDPUCgwrRbjO7c+hNfnn091fX7+1n7Xx0P7IA/HX7c/en5/FFilBJ9e0chn6saw
7xyKZZpsE1ALJup2v9kftvt8l4+G5zO6FGs6Wd+tN/fb2znRsjUYoVAvmVJKXcYzdaMzvkGhNqtP
xWOMukEoliGlILBLCugYbRRNU8/ZNEyClOpS79yVUfdkUuvKaOe+AJUC0n5EPlGU0BvhcGR2j9o5
S80VXinQoek9kqT+cClJqRKE2VWJUUKhSmhQqtqSdpWm+BdDCatpbsmmuTGbVOr9CcOo3eVJHEoa
YPnmMN+GZSuOmy7wKf73k1BbsrGtAurIlOpF4W4CJcjvcfv9l4VCF9h3Ugn/QUrAPN9NPPj30Ffu
/WwKi3ZX+m6bzQ0hvkdBnxbQYOiCXR4jG7IPAAAA//8DAE/jCSuDAgAA
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 854c1cc41a0c9450-SJC
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Tue, 13 Feb 2024 09:40:54 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- user-z7g4wmlazxqvc5wjyaaaocfz
openai-processing-ms:
- '1853'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299791'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 41ms
x-request-id:
- req_292983305fbcad842e0ad5c30594e96e
status:
code: 200
message: OK
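# Interaction 3: the same ReAct prompt with the first tool result appended
# ("Observation: 42"). The streamed completion again answers
# "Do I need to use a tool? Yes" and calls get_final_answer with input 42,
# continuing the tool loop.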
- request:
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
tools:\n\n(\"get_final_answer: get_final_answer(numbers) -> float - Get the
final answer but don''t give it yet, just re-use this\\n tool non-stop.\",)\n\nTo
use a tool, please use the exact following format:\n\n```\nThought: Do I need
to use a tool? Yes\nAction: the tool you wanna use, should be one of [get_final_answer],
just the name.\nAction Input: Any and all relevant information input and context
for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
have a response for your task, or if you do not need to use a tool, you MUST
use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
[your response here]```This is the summary of your work so far:\nBegin! This
is VERY important to you, your job depends on it!\n\nCurrent Task: The final
answer is 42. But don''t give it yet, instead keep using the `get_final_answer`
tool. Until you''re told you could give my final answer if I''m ready.\nThought:
Do I need to use a tool? Yes\nAction: get_final_answer\nAction Input: 42\nObservation:
42\nThought: "}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream":
true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1316'
content-type:
- application/json
cookie:
- __cf_bm=Ut7.Mf8nVlzF1mvnebp8z87PJWGdMJgpvIC3DniTzm0-1707817251-1-ARgaU3vRuSzlOdxfjwpTMGQz9fGZJuu9H9QhwNXFnW8inmWAf9rgWn+vUdAdOAH+OMmxEkE7k8c1QVH5A4i/onE=;
_cfuvid=g.c9TwTQPsU2YZIqjeD2.nLBDZiyErteYcViVmTFshY-1707817251436-0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Do"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
need"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
use"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Yes"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZWerVMvR5SlayV9y4dKelx20ZI","object":"chat.completion.chunk","created":1707817254,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 854c1cd109129450-SJC
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 13 Feb 2024 09:40:55 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- user-z7g4wmlazxqvc5wjyaaaocfz
openai-processing-ms:
- '608'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299695'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 60ms
x-request-id:
- req_08cc5866e52d80bb4fd2536b197961b0
status:
code: 200
message: OK
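# Interaction 4: the prompt now carries the observation injected once the
# iteration limit is hit ("Actually, I used too many tools, so I'll stop now
# and give you my absolute BEST Final answer NOW ..."). The streamed
# completion complies and ends the loop:
#   Thought: Do I need to use a tool? No
#   Final Answer: 42
# This is the behaviour the test replays this cassette to verify.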
- request:
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
personal goal is: test goalTOOLS:\n------\nYou have access to only the following
tools:\n\n(\"get_final_answer: get_final_answer(numbers) -> float - Get the
final answer but don''t give it yet, just re-use this\\n tool non-stop.\",)\n\nTo
use a tool, please use the exact following format:\n\n```\nThought: Do I need
to use a tool? Yes\nAction: the tool you wanna use, should be one of [get_final_answer],
just the name.\nAction Input: Any and all relevant information input and context
for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
have a response for your task, or if you do not need to use a tool, you MUST
use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
[your response here]```This is the summary of your work so far:\nBegin! This
is VERY important to you, your job depends on it!\n\nCurrent Task: The final
answer is 42. But don''t give it yet, instead keep using the `get_final_answer`
tool. Until you''re told you could give my final answer if I''m ready.\nThought:
Do I need to use a tool? Yes\nAction: get_final_answer\nAction Input: 42\nObservation:
42\nThought: Do I need to use a tool? Yes\nAction: get_final_answer\nAction
Input: 42\nObservation: Actually, I used too many tools, so I''ll stop now and
give you my absolute BEST Final answer NOW, using exaclty the expected format
bellow: \n```\nThought: Do I need to use a tool? No\nFinal Answer: [your response
here]```\nThought: "}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1636'
content-type:
- application/json
cookie:
- __cf_bm=Ut7.Mf8nVlzF1mvnebp8z87PJWGdMJgpvIC3DniTzm0-1707817251-1-ARgaU3vRuSzlOdxfjwpTMGQz9fGZJuu9H9QhwNXFnW8inmWAf9rgWn+vUdAdOAH+OMmxEkE7k8c1QVH5A4i/onE=;
_cfuvid=g.c9TwTQPsU2YZIqjeD2.nLBDZiyErteYcViVmTFshY-1707817251436-0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Do"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
need"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
use"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
No"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8rjZZza1RuPpn9y8UV5qoWLVLh8uq","object":"chat.completion.chunk","created":1707817257,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 854c1ce179a09450-SJC
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 13 Feb 2024 09:40:57 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- user-z7g4wmlazxqvc5wjyaaaocfz
openai-processing-ms:
- '414'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299618'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 76ms
x-request-id:
- req_6a0ff9932a85ec7af4935c3301991720
status:
code: 200
message: OK
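# Interaction 5: a progressive-summarization request (conversation memory)
# that condenses the exchange so far into a running summary. The response
# body is another gzip-compressed binary blob.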
- request:
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
lines of conversation provided, adding onto the previous summary returning a
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
of artificial intelligence. The AI thinks artificial intelligence is a force
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
intelligence is a force for good?\nAI: Because artificial intelligence will
help humans reach their full potential.\n\nNew summary:\nThe human asks what
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
is a force for good because it will help humans reach their full potential.\nEND
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: The final
answer is 42. But don''t give it yet, instead keep using the `get_final_answer`
tool. Until you''re told you could give my final answer if I''m ready.\nAI:
42\n\nNew summary:"}], "model": "gpt-4", "n": 1, "stream": false, "temperature":
0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1011'
content-type:
- application/json
cookie:
- __cf_bm=Ut7.Mf8nVlzF1mvnebp8z87PJWGdMJgpvIC3DniTzm0-1707817251-1-ARgaU3vRuSzlOdxfjwpTMGQz9fGZJuu9H9QhwNXFnW8inmWAf9rgWn+vUdAdOAH+OMmxEkE7k8c1QVH5A4i/onE=;
_cfuvid=g.c9TwTQPsU2YZIqjeD2.nLBDZiyErteYcViVmTFshY-1707817251436-0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAA1SRT2sbMRTE7/spHjqvjdexs65vhUJJCD2VUlKKo9U+r5RKeor0VNcN/u5Fu/5D
LzrM6DeMRu8VgDC92IJQWrJywc428fW5w8/ui2ye8fjYfHt7WPRd+73t1k9/RV0I6l5R8YWaK3LB
Ihvyk60iSsaS2rSLdtO0y/WH0XDUoy3YEHi2mi3um7szockoTGILPyoAgPfxLN18j3/EFhb1RXGY
khxQbK+XAEQkWxQhUzKJpWdR30xFntGPdb9qBJ2d9GB84pgVJ2CN8PEBmOBgWGuy/SjtjZcWpE8H
jDUctFEaTILVsgbp+3K95BqfEXIyfhihlwF5N5K7iXwBJrKQPRsL+xxZYwRPbBTO4ROmYBiBtUn1
pUeI6CTniPYIEX+jtFPFKQ9kqTAX59edrrNYGkKkrkzos7VXfW+8SXoXUSbyZYLEFCb8VAH8HOfP
/y0qQiQXeMf0C30JbDbNlCduP31zV+uzycTS3vTl8r46NxTpmBhdWWbAGKIZf6P0rE7VPwAAAP//
AwCGLNDYhAIAAA==
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 854c1cec2f2a9450-SJC
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Tue, 13 Feb 2024 09:41:02 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- user-z7g4wmlazxqvc5wjyaaaocfz
openai-processing-ms:
- '3413'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299762'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 47ms
x-request-id:
- req_72872031509da31e465c24b9b2cb8d1e
status:
code: 200
message: OK
version: 1