# crewAI/tests/cassettes/test_agent_respect_the_max_rpm_set.yaml
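#
# VCR-style cassette: each entry under `interactions` pairs a recorded HTTP request
# to api.openai.com with the response that was captured at record time, so the test
# suite can replay the exchange offline instead of calling the live API. The request
# bodies and streamed SSE chunks below are kept verbatim as recorded.
#
# A minimal sketch of how a cassette like this is typically replayed in a pytest
# test (assumed setup; the exact decorator and VCR configuration used by crewAI's
# test suite are not shown in this file):
#
#     import pytest
#
#     @pytest.mark.vcr(filter_headers=["authorization"])  # pytest-recording / vcrpy
#     def test_agent_respect_the_max_rpm_set():
#         # Agent/Crew setup would go here; every OpenAI call made while the test
#         # runs is matched against the requests recorded in this cassette and
#         # answered with the stored response bodies.
#         ...
#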
interactions:
- request:
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
personal goal is: test goalYou have access to ONLY the following tools, use
one at time:\n\nget_final_answer: get_final_answer(anything: str) -> float -
Get the final answer but don''t give it yet, just re-use this\n tool
non-stop.\n\nTo use a tool you MUST use the exact following format:\n\n```\nUse
Tool: the tool you wanna use, should be one of [get_final_answer] and absolute
all relevant input and context for using the tool, you must use only one tool
at once.\nResult: [result of the tool]\n```\n\nTo complete the task you MUST
follow the format:\n\n```\nFinal Answer: [THE MOST COMPLETE ANSWE WITH ALL CONTEXT,
DO NOT LEAVE ANYTHING OUT]\n``` You must use these formats, my life depends
on it.This is the summary of your work so far:\nBegin! This is VERY important
to you, your job depends on it!\n\nCurrent Task: Use tool logic for `get_final_answer`
but fon''t give you final answer yet, instead keep using it unless you''re told
to give your final answer\n"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '1146'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Use"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"(\""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"test"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\")"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnn35wbM9ofd2y0penNGZVrMU5M","object":"chat.completion.chunk","created":1708433179,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8586da0dfc040195-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 20 Feb 2024 12:46:20 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=3KdTdppzISp_aXDGrhjNPCAMZr_z.e_6zGzRwkL1Z1A-1708433180-1.0-AZprEO7QZ67lzx0Up/7v/yBs5HF0KZToH0v8k83LyVLliolwWJXHPhaWnCSgUYGXyDGIwTbp56x3rCSi6ZWgPSo=;
path=/; expires=Tue, 20-Feb-24 13:16:20 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=2WoO6RoHFKNeNbXVRf.R9GX5cMLttojSLWoGaXPXWz4-1708433180359-0.0-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '212'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299735'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 53ms
x-request-id:
- req_5abc3ffed8dc7b9725967f681cc01e23
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "Tools available:\n\nTool Name:
get_final_answer\nTool Description: get_final_answer(anything: str) -> float
- Get the final answer but don''t give it yet, just re-use this\n tool
non-stop.\nTool Arguments: {''anything'': {''type'': ''string''}}\n\nReturn
a valid schema for the tool, the tool name must be equal one of the options,
use this text to inform a valid ouput schema:\nUse Tool: get_final_answer(\"test
input\")```"}, {"role": "system", "content": "The schema should have the following
structure, only two keys:\n- tool_name: str\n- arguments: dict (with all arguments
being passed)\n\nExample:\n{\"tool_name\": \"tool_name\", \"arguments\": {\"arg_name1\":
\"value\", \"arg_name2\": 2}}\n"}], "model": "gpt-4", "tool_choice": {"type":
"function", "function": {"name": "InstructorToolCalling"}}, "tools": [{"type":
"function", "function": {"name": "InstructorToolCalling", "description": "Correctly
extracted `InstructorToolCalling` with all the required parameters with correct
types", "parameters": {"properties": {"tool_name": {"description": "The name
of the tool to be called.", "title": "Tool Name", "type": "string"}, "arguments":
{"anyOf": [{"type": "object"}, {"type": "null"}], "description": "A dictinary
of arguments to be passed to the tool.", "title": "Arguments"}}, "required":
["arguments", "tool_name"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '1395'
content-type:
- application/json
cookie:
- __cf_bm=3KdTdppzISp_aXDGrhjNPCAMZr_z.e_6zGzRwkL1Z1A-1708433180-1.0-AZprEO7QZ67lzx0Up/7v/yBs5HF0KZToH0v8k83LyVLliolwWJXHPhaWnCSgUYGXyDGIwTbp56x3rCSi6ZWgPSo=;
_cfuvid=2WoO6RoHFKNeNbXVRf.R9GX5cMLttojSLWoGaXPXWz4-1708433180359-0.0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
IZwMACD+t1S763J6885ulVt620pGQ2Qis4s+WOCk/99Ng4Dw7or2eB4Wrv+iK6AEw148lpYFnEBA
Zflr7hc3Fy/j8QtWpLr3swDoVlRgtTa5aqLvLranEg5vd85eL6ab5TD+Pr49ur8/cu3u+d4OOwXA
8PTPVplT6JUYeC4IhD25fM+uqDCcDxaT8Xi4GBDPNGFlPRVYx9yddAez4Zhrdx1cZRMV/hYA8HnV
APjd+y4VZKC7bBWGSQV8CWAbvKUCTUouZSOZHQBoW9eqIFvviV05BF9WxnvleKU/9+Ah/Wq8Lz9O
X3bqvSt7dyDj8ehis9m/fjuaXN3rxOLV92gUV4FRiuFodUphDKCYxiHzRI5wfy609yH4PeO9k1pY
CDDYA1TgpxZAe3dNTGM1FTRrm8tnJ8aXRtKrbTU7WI6f0VRAlKS857WTen6bbcpwErdZUwvwreWb
dtPfXoA9/0gRvt3vRx/q2IanFA2BLfGo8vVjU1RgyiGqib4L4DFNqtvIH2JsQxNzmcN/K4kKo+lU
F8WMhAh+PsAPJICc/mJUWCmyS8FM+eyktm1sXdY4xXdhAAM=
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8586da139a980195-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- br
Content-Type:
- application/json
Date:
- Tue, 20 Feb 2024 12:46:22 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '1268'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299823'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 35ms
x-request-id:
- req_04b7cb23e4c0790f8d6c94c95f19581b
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
personal goal is: test goalYou have access to ONLY the following tools, use
one at time:\n\nget_final_answer: get_final_answer(anything: str) -> float -
Get the final answer but don''t give it yet, just re-use this\n tool
non-stop.\n\nTo use a tool you MUST use the exact following format:\n\n```\nUse
Tool: the tool you wanna use, should be one of [get_final_answer] and absolute
all relevant input and context for using the tool, you must use only one tool
at once.\nResult: [result of the tool]\n```\n\nTo complete the task you MUST
follow the format:\n\n```\nFinal Answer: [THE MOST COMPLETE ANSWE WITH ALL CONTEXT,
DO NOT LEAVE ANYTHING OUT]\n``` You must use these formats, my life depends
on it.This is the summary of your work so far:\nBegin! This is VERY important
to you, your job depends on it!\n\nCurrent Task: Use tool logic for `get_final_answer`
but fon''t give you final answer yet, instead keep using it unless you''re told
to give your final answer\nUse Tool: get_final_answer(\"test input\")\nResult:
42\nIf you don''t need to use any more tools, use the correct format for your
final answer:\n\n```Final Answer: [your most complete final answer goes here]```\nThought:
"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature":
0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '1366'
content-type:
- application/json
cookie:
- __cf_bm=3KdTdppzISp_aXDGrhjNPCAMZr_z.e_6zGzRwkL1Z1A-1708433180-1.0-AZprEO7QZ67lzx0Up/7v/yBs5HF0KZToH0v8k83LyVLliolwWJXHPhaWnCSgUYGXyDGIwTbp56x3rCSi6ZWgPSo=;
_cfuvid=2WoO6RoHFKNeNbXVRf.R9GX5cMLttojSLWoGaXPXWz4-1708433180359-0.0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"As"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
task"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
states"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
should"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
keep"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Therefore"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
will"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
not"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
provide"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
at"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
this"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
point"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
will"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
continue"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
use"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
as"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
per"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
instructions"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnqDMyWifgKZTwTPZHNACH5JsPs","object":"chat.completion.chunk","created":1708433182,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8586da1d6e670195-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 20 Feb 2024 12:46:22 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '191'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299680'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 63ms
x-request-id:
- req_0cc58432d6059b41672063a8e209fed6
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
personal goal is: test goalYou have access to ONLY the following tools, use
one at time:\n\nget_final_answer: get_final_answer(anything: str) -> float -
Get the final answer but don''t give it yet, just re-use this\n tool
non-stop.\n\nTo use a tool you MUST use the exact following format:\n\n```\nUse
Tool: the tool you wanna use, should be one of [get_final_answer] and absolute
all relevant input and context for using the tool, you must use only one tool
at once.\nResult: [result of the tool]\n```\n\nTo complete the task you MUST
follow the format:\n\n```\nFinal Answer: [THE MOST COMPLETE ANSWE WITH ALL CONTEXT,
DO NOT LEAVE ANYTHING OUT]\n``` You must use these formats, my life depends
on it.This is the summary of your work so far:\nBegin! This is VERY important
to you, your job depends on it!\n\nCurrent Task: Use tool logic for `get_final_answer`
but fon''t give you final answer yet, instead keep using it unless you''re told
to give your final answer\nUse Tool: get_final_answer(\"test input\")\nResult:
42\nIf you don''t need to use any more tools, use the correct format for your
final answer:\n\n```Final Answer: [your most complete final answer goes here]```\nThought:
As the task states, I should keep using the `get_final_answer` tool. Therefore,
I will not provide the final answer at this point. I will continue to use the
tool as per the instructions.\nResult: \nYou didn''t use the expected format,
you MUST use a tool or give your best final answer.\nTo use a tool you MUST
use the exact following format:\n\n```\nUse Tool: the tool you wanna use, and
absolute all relevant input and context for using the tool, you must use only
one tool at once.\nResult: [result of the tool]\n```\n\nTo complete the task
you MUST follow the format:\n\n```\nFinal Answer: [your most complete final
answer goes here]\n``` You must use these formats, my life depends on it.\nThought:
"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature":
0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '2070'
content-type:
- application/json
cookie:
- __cf_bm=3KdTdppzISp_aXDGrhjNPCAMZr_z.e_6zGzRwkL1Z1A-1708433180-1.0-AZprEO7QZ67lzx0Up/7v/yBs5HF0KZToH0v8k83LyVLliolwWJXHPhaWnCSgUYGXyDGIwTbp56x3rCSi6ZWgPSo=;
_cfuvid=2WoO6RoHFKNeNbXVRf.R9GX5cMLttojSLWoGaXPXWz4-1708433180359-0.0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
understand"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
urgency"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
will"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
continue"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
as"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
directed"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
\n\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Use"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"(\""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"test"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\")"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnsSGW7IURmDdld23fe4EYtqyY0","object":"chat.completion.chunk","created":1708433184,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8586da29fd240195-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 20 Feb 2024 12:46:25 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '563'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299509'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 98ms
x-request-id:
- req_6fa089577909665bc398d637d9f8a544
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "Tools available:\n\nTool Name:
get_final_answer\nTool Description: get_final_answer(anything: str) -> float
- Get the final answer but don''t give it yet, just re-use this\n tool
non-stop.\nTool Arguments: {''anything'': {''type'': ''string''}}\n\nReturn
a valid schema for the tool, the tool name must be equal one of the options,
use this text to inform a valid ouput schema:\nI understand the urgency. I will
continue using the tool as directed. \n\nUse Tool: get_final_answer(\"test input\")```"},
{"role": "system", "content": "The schema should have the following structure,
only two keys:\n- tool_name: str\n- arguments: dict (with all arguments being
passed)\n\nExample:\n{\"tool_name\": \"tool_name\", \"arguments\": {\"arg_name1\":
\"value\", \"arg_name2\": 2}}\n"}], "model": "gpt-4", "tool_choice": {"type":
"function", "function": {"name": "InstructorToolCalling"}}, "tools": [{"type":
"function", "function": {"name": "InstructorToolCalling", "description": "Correctly
extracted `InstructorToolCalling` with all the required parameters with correct
types", "parameters": {"properties": {"tool_name": {"description": "The name
of the tool to be called.", "title": "Tool Name", "type": "string"}, "arguments":
{"anyOf": [{"type": "object"}, {"type": "null"}], "description": "A dictinary
of arguments to be passed to the tool.", "title": "Arguments"}}, "required":
["arguments", "tool_name"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '1469'
content-type:
- application/json
cookie:
- __cf_bm=3KdTdppzISp_aXDGrhjNPCAMZr_z.e_6zGzRwkL1Z1A-1708433180-1.0-AZprEO7QZ67lzx0Up/7v/yBs5HF0KZToH0v8k83LyVLliolwWJXHPhaWnCSgUYGXyDGIwTbp56x3rCSi6ZWgPSo=;
_cfuvid=2WoO6RoHFKNeNbXVRf.R9GX5cMLttojSLWoGaXPXWz4-1708433180359-0.0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
IXQMACCG6qo+TTWaegndHsID+RQ57H2TYueO//5UcMmgPc7XovUu9BOgCAZz4bE4VuAK1GrF2rYk
iH9UnqnTr7ntvR8ZQF1RgGWnUmmDGe+GSzf80N8u/obyPvpYbKvmbne/vfj9Wv7lKAPoi0NdJk56
kmJgau8gxM3ld3VFgfl2tlstl/PdhvjE+qo2FGAb0ng1nm3mS67bzuuyjhT4lwHAx2wA8Lv3AwrM
RrtEFoZCAfwRwN6bmgJUMeqYlEscAUDZulEBNxhDnErem7xUxgjHRvRjDR7SX5Ux+e7Vvhwu18Xp
4Yf5O8yaotH7g+87mdjxwltQiszAaMWQtD4hMAXQKWuQcuGucP+c7797b46UMdq1hzkAnb1AAX5I
B0jrXnPK1pICkm2d8kY7ZXLl4kvdS46wJH9RUuBDMoQw1u9THRO0C0OS/JLui9qiX7qDNfVkEf1l
9Dwa34beF9EHNAvhIflLx5YowJh8EON+ZcBTcHQHfz9h6L0NKU/+f+0iBRbbhjAOIYJadvDbCCDN
2i8zLRnWJtjJG+3aug+9jhU9+8oMAw==
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8586da35dac50195-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- br
Content-Type:
- application/json
Date:
- Tue, 20 Feb 2024 12:46:27 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '1132'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299805'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 39ms
x-request-id:
- req_f236d1688b10f6b4c5888530ac6d37bf
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
personal goal is: test goalYou have access to ONLY the following tools, use
one at time:\n\nget_final_answer: get_final_answer(anything: str) -> float -
Get the final answer but don''t give it yet, just re-use this\n tool
non-stop.\n\nTo use a tool you MUST use the exact following format:\n\n```\nUse
Tool: the tool you wanna use, should be one of [get_final_answer] and absolute
all relevant input and context for using the tool, you must use only one tool
at once.\nResult: [result of the tool]\n```\n\nTo complete the task you MUST
follow the format:\n\n```\nFinal Answer: [THE MOST COMPLETE ANSWE WITH ALL CONTEXT,
DO NOT LEAVE ANYTHING OUT]\n``` You must use these formats, my life depends
on it.This is the summary of your work so far:\nBegin! This is VERY important
to you, your job depends on it!\n\nCurrent Task: Use tool logic for `get_final_answer`
but fon''t give you final answer yet, instead keep using it unless you''re told
to give your final answer\nUse Tool: get_final_answer(\"test input\")\nResult:
42\nIf you don''t need to use any more tools, use the correct format for your
final answer:\n\n```Final Answer: [your most complete final answer goes here]```\nThought:
As the task states, I should keep using the `get_final_answer` tool. Therefore,
I will not provide the final answer at this point. I will continue to use the
tool as per the instructions.\nResult: \nYou didn''t use the expected format,
you MUST use a tool or give your best final answer.\nTo use a tool you MUST
use the exact following format:\n\n```\nUse Tool: the tool you wanna use, and
absolute all relevant input and context for using the tool, you must use only
one tool at once.\nResult: [result of the tool]\n```\n\nTo complete the task
you MUST follow the format:\n\n```\nFinal Answer: [your most complete final
answer goes here]\n``` You must use these formats, my life depends on it.\nThought:
I understand the urgency. I will continue using the tool as directed. \n\nUse
Tool: get_final_answer(\"test input\")\nResult: I already used the get_final_answer
tool with input test input. So I already know that and must stop using it with
same input. \nI could give my best complete final answer if I''m ready, using
exaclty the expected format bellow:\n\n```\nFinal Answer: [your most complete
final answer goes here]\n``` You must use these formats, my life depends on
it.\nIf you don''t need to use any more tools, use the correct format for your
final answer:\n\n```Final Answer: [your most complete final answer goes here]```\nThought:
"}], "model": "gpt-4", "n": 1, "stop": ["\nResult"], "stream": true, "temperature":
0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '2711'
content-type:
- application/json
cookie:
- __cf_bm=3KdTdppzISp_aXDGrhjNPCAMZr_z.e_6zGzRwkL1Z1A-1708433180-1.0-AZprEO7QZ67lzx0Up/7v/yBs5HF0KZToH0v8k83LyVLliolwWJXHPhaWnCSgUYGXyDGIwTbp56x3rCSi6ZWgPSo=;
_cfuvid=2WoO6RoHFKNeNbXVRf.R9GX5cMLttojSLWoGaXPXWz4-1708433180359-0.0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Okay"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
it"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
appears"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
that"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
am"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
ready"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
provide"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
my"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
as"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
have"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
already"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
used"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
with"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
same"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
and"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
have"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
been"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
instructed"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
to"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
stop"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
it"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
with"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
same"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
The"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
result"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
of"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
using"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"get"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"`"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
tool"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
with"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
the"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
\""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"test"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
input"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
is"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-8uJnvIJkya5jpw7Ldew4QjaTrDUkv","object":"chat.completion.chunk","created":1708433187,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8586da3efd960195-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Type:
- text/event-stream
Date:
- Tue, 20 Feb 2024 12:46:28 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '322'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299352'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 129ms
x-request-id:
- req_5e28fa5f92c524c2499ba99f2c688f25
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
lines of conversation provided, adding onto the previous summary returning a
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
of artificial intelligence. The AI thinks artificial intelligence is a force
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
intelligence is a force for good?\nAI: Because artificial intelligence will
help humans reach their full potential.\n\nNew summary:\nThe human asks what
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
is a force for good because it will help humans reach their full potential.\nEND
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: Use tool
logic for `get_final_answer` but fon''t give you final answer yet, instead keep
using it unless you''re told to give your final answer\nAI: The result of using
the `get_final_answer` tool with the input \"test input\" is 42.\n\nNew summary:"}],
"model": "gpt-4", "n": 1, "stream": false, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '1077'
content-type:
- application/json
cookie:
- __cf_bm=3KdTdppzISp_aXDGrhjNPCAMZr_z.e_6zGzRwkL1Z1A-1708433180-1.0-AZprEO7QZ67lzx0Up/7v/yBs5HF0KZToH0v8k83LyVLliolwWJXHPhaWnCSgUYGXyDGIwTbp56x3rCSi6ZWgPSo=;
_cfuvid=2WoO6RoHFKNeNbXVRf.R9GX5cMLttojSLWoGaXPXWz4-1708433180359-0.0-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.12.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.12.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
IXwKACBmqrprmf4wu1VumUrpKpDMhCZ4IhT3wZsgA7UONup8is7j98ESXJYPO1804gB7m0jQcUVO
NLqG2ru7BCBbkAHli1TzZXDtWXPmt8PH1+lTHKQvm9Pt6/F1PBn57GJ1+0itBCDJfjlXSqZzxUCx
4lHol8unuCCD/rQ3Gw2H/Xkf+GQpBTsyoCpoe9TuTfpDqsmF2JwjGbwnALAb9QCqe98hg15rlr6F
wZNB/AigWhyTAaUx2qipV2ohpMLWDRnQw4JRIGImgdJ2LPTxz0enUEETGSri4KSyOUqp8VOxfpfW
p+479XHF9U8xiWO5eLW+YWT1qltFRDacuAIqqOw/MyfIAZQOHsRlj/4jOCLbYGsoEkkodWVldYEP
Uo4K60OjH6QLqmKNSJHrbARSYjTokLXgwWxbTqpQSxbJwDfOmZMORdf6Tvs1RAYUVQIL85AAn+7p
Nha/RaGWZdBvlT/2kQz680Fr0UpIYbDjYU993EAKZgxG40RLSNEBMN+l9RXXobbeCvrGueSQGAM=
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8586da590faf0195-GRU
Cache-Control:
- no-cache, must-revalidate
Connection:
- keep-alive
Content-Encoding:
- br
Content-Type:
- application/json
Date:
- Tue, 20 Feb 2024 12:46:34 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
access-control-allow-origin:
- '*'
alt-svc:
- h3=":443"; ma=86400
openai-model:
- gpt-4-0613
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '2108'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15724800; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '300000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '299746'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 50ms
x-request-id:
- req_f00d03ab7846c1d624974aa4ecc3b75b
status:
code: 200
message: OK
version: 1