mirror of
https://github.com/crewAIInc/crewAI.git
synced 2026-01-08 23:58:34 +00:00
489 lines
19 KiB
YAML
489 lines
19 KiB
YAML
interactions:
|
|
- request:
|
|
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
|
personal goal is: test goalYou have access to ONLY the following tools, use
|
|
one at time:\n\nget_final_answer: get_final_answer(anything: str) -> float -
|
|
Get the final answer but don''t give it yet, just re-use this\n tool
|
|
non-stop.\n\nTo use a tool you MUST use the exact following format:\n\n```\nUse
|
|
Tool: the tool you wanna use, should be one of [get_final_answer] and absolute
|
|
all relevant input and context for using the tool, you must use only one tool
|
|
at once.\nResult: [result of the tool]\n```\n\nTo give your final answer use
|
|
the exact following format:\n\n```\nFinal Answer: [THE MOST COMPLETE ANSWE WITH
|
|
ALL CONTEXT, DO NOT LEAVE ANYTHING OUT]\n```\nI MUST use these formats, my jobs
|
|
depends on it!This is the summary of your work so far:\n\n\nCurrent Task: The
|
|
final answer is 42. But don''t give it until I tell you so, instead keep using
|
|
the `get_final_answer` tool.\n\n Begin! This is VERY important to you, your
|
|
job depends on it!\n\n\n"}], "model": "gpt-4-0125-preview", "n": 1, "stop":
|
|
["\nResult"], "stream": true, "temperature": 0.7}'
|
|
headers:
|
|
accept:
|
|
- application/json
|
|
accept-encoding:
|
|
- gzip, deflate, br
|
|
connection:
|
|
- keep-alive
|
|
content-length:
|
|
- '1145'
|
|
content-type:
|
|
- application/json
|
|
host:
|
|
- api.openai.com
|
|
user-agent:
|
|
- OpenAI/Python 1.12.0
|
|
x-stainless-arch:
|
|
- arm64
|
|
x-stainless-async:
|
|
- 'false'
|
|
x-stainless-lang:
|
|
- python
|
|
x-stainless-os:
|
|
- MacOS
|
|
x-stainless-package-version:
|
|
- 1.12.0
|
|
x-stainless-runtime:
|
|
- CPython
|
|
x-stainless-runtime-version:
|
|
- 3.11.7
|
|
method: POST
|
|
uri: https://api.openai.com/v1/chat/completions
|
|
response:
|
|
body:
|
|
string: 'data: {"id":"chatcmpl-8umSlvb0E49ZRvWth5FKSw9Me8O4a","object":"chat.completion.chunk","created":1708543351,"model":"gpt-4-0125-preview","system_fingerprint":"fp_f084bcfc79","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSlvb0E49ZRvWth5FKSw9Me8O4a","object":"chat.completion.chunk","created":1708543351,"model":"gpt-4-0125-preview","system_fingerprint":"fp_f084bcfc79","choices":[{"index":0,"delta":{"content":"Use"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSlvb0E49ZRvWth5FKSw9Me8O4a","object":"chat.completion.chunk","created":1708543351,"model":"gpt-4-0125-preview","system_fingerprint":"fp_f084bcfc79","choices":[{"index":0,"delta":{"content":"
|
|
Tool"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSlvb0E49ZRvWth5FKSw9Me8O4a","object":"chat.completion.chunk","created":1708543351,"model":"gpt-4-0125-preview","system_fingerprint":"fp_f084bcfc79","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSlvb0E49ZRvWth5FKSw9Me8O4a","object":"chat.completion.chunk","created":1708543351,"model":"gpt-4-0125-preview","system_fingerprint":"fp_f084bcfc79","choices":[{"index":0,"delta":{"content":"
|
|
get"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSlvb0E49ZRvWth5FKSw9Me8O4a","object":"chat.completion.chunk","created":1708543351,"model":"gpt-4-0125-preview","system_fingerprint":"fp_f084bcfc79","choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSlvb0E49ZRvWth5FKSw9Me8O4a","object":"chat.completion.chunk","created":1708543351,"model":"gpt-4-0125-preview","system_fingerprint":"fp_f084bcfc79","choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSlvb0E49ZRvWth5FKSw9Me8O4a","object":"chat.completion.chunk","created":1708543351,"model":"gpt-4-0125-preview","system_fingerprint":"fp_f084bcfc79","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
|
|
|
|
|
data: [DONE]
|
|
|
|
|
|
'
|
|
headers:
|
|
CF-Cache-Status:
|
|
- DYNAMIC
|
|
CF-RAY:
|
|
- 85915bc79a85a537-GRU
|
|
Cache-Control:
|
|
- no-cache, must-revalidate
|
|
Connection:
|
|
- keep-alive
|
|
Content-Type:
|
|
- text/event-stream
|
|
Date:
|
|
- Wed, 21 Feb 2024 19:22:31 GMT
|
|
Server:
|
|
- cloudflare
|
|
Set-Cookie:
|
|
- __cf_bm=yNWN6xxJzIde4UgAgONRHE3BZJmmCPGu7Ch2r8cft9A-1708543351-1.0-ASCEMu/lL3CxJvBcFTc1tUBtRrna4KuvHkF7gIGXFGT7IzdbHGcbdzKpMclXH0YRkPX7kIeGPEhgHxYfptOxLlM=;
|
|
path=/; expires=Wed, 21-Feb-24 19:52:31 GMT; domain=.api.openai.com; HttpOnly;
|
|
Secure; SameSite=None
|
|
- _cfuvid=rc0f50rp.We1R7UrhqtjVUfOr0VA90lt_6hQ5eO2240-1708543351878-0.0-604800000;
|
|
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
|
Transfer-Encoding:
|
|
- chunked
|
|
access-control-allow-origin:
|
|
- '*'
|
|
alt-svc:
|
|
- h3=":443"; ma=86400
|
|
openai-model:
|
|
- gpt-4-0125-preview
|
|
openai-organization:
|
|
- crewai-iuxna1
|
|
openai-processing-ms:
|
|
- '595'
|
|
openai-version:
|
|
- '2020-10-01'
|
|
strict-transport-security:
|
|
- max-age=15724800; includeSubDomains
|
|
x-ratelimit-limit-requests:
|
|
- '10000'
|
|
x-ratelimit-limit-tokens:
|
|
- '800000'
|
|
x-ratelimit-remaining-requests:
|
|
- '9999'
|
|
x-ratelimit-remaining-tokens:
|
|
- '799739'
|
|
x-ratelimit-reset-requests:
|
|
- 6ms
|
|
x-ratelimit-reset-tokens:
|
|
- 19ms
|
|
x-request-id:
|
|
- req_5811f8911568932229521026f504c92b
|
|
status:
|
|
code: 200
|
|
message: OK
|
|
- request:
|
|
body: '{"messages": [{"role": "user", "content": "Tools available:\n###\nTool
|
|
Name: get_final_answer\nTool Description: get_final_answer(anything: str) ->
|
|
float - Get the final answer but don''t give it yet, just re-use this\n tool
|
|
non-stop.\nTool Arguments: {''anything'': {''type'': ''string''}}\n\nReturn
|
|
a valid schema for the tool, the tool name must be equal one of the options,
|
|
use this text to inform a valid ouput schema:\nUse Tool: get_final_answer```"},
|
|
{"role": "system", "content": "The schema should have the following structure,
|
|
only two keys:\n- tool_name: str\n- arguments: dict (with all arguments being
|
|
passed)\n\nExample:\n{\"tool_name\": \"tool name\", \"arguments\": {\"arg_name1\":
|
|
\"value\", \"arg_name2\": 2}}"}], "model": "gpt-4-0125-preview", "tool_choice":
|
|
{"type": "function", "function": {"name": "InstructorToolCalling"}}, "tools":
|
|
[{"type": "function", "function": {"name": "InstructorToolCalling", "description":
|
|
"Correctly extracted `InstructorToolCalling` with all the required parameters
|
|
with correct types", "parameters": {"properties": {"tool_name": {"description":
|
|
"The name of the tool to be called.", "title": "Tool Name", "type": "string"},
|
|
"arguments": {"anyOf": [{"type": "object"}, {"type": "null"}], "description":
|
|
"A dictinary of arguments to be passed to the tool.", "title": "Arguments"}},
|
|
"required": ["arguments", "tool_name"], "type": "object"}}}]}'
|
|
headers:
|
|
accept:
|
|
- application/json
|
|
accept-encoding:
|
|
- gzip, deflate, br
|
|
connection:
|
|
- keep-alive
|
|
content-length:
|
|
- '1393'
|
|
content-type:
|
|
- application/json
|
|
cookie:
|
|
- __cf_bm=yNWN6xxJzIde4UgAgONRHE3BZJmmCPGu7Ch2r8cft9A-1708543351-1.0-ASCEMu/lL3CxJvBcFTc1tUBtRrna4KuvHkF7gIGXFGT7IzdbHGcbdzKpMclXH0YRkPX7kIeGPEhgHxYfptOxLlM=;
|
|
_cfuvid=rc0f50rp.We1R7UrhqtjVUfOr0VA90lt_6hQ5eO2240-1708543351878-0.0-604800000
|
|
host:
|
|
- api.openai.com
|
|
user-agent:
|
|
- OpenAI/Python 1.12.0
|
|
x-stainless-arch:
|
|
- arm64
|
|
x-stainless-async:
|
|
- 'false'
|
|
x-stainless-lang:
|
|
- python
|
|
x-stainless-os:
|
|
- MacOS
|
|
x-stainless-package-version:
|
|
- 1.12.0
|
|
x-stainless-runtime:
|
|
- CPython
|
|
x-stainless-runtime-version:
|
|
- 3.11.7
|
|
method: POST
|
|
uri: https://api.openai.com/v1/chat/completions
|
|
response:
|
|
body:
|
|
string: !!binary |
|
|
IZgMACD+N1V3LdO/N7tV1pTS8HEYIZmEpuae///mg0OL9qBoLVr3RK+AEgw7aZhNwQ/+P9DHdE4f
|
|
YzyNShGHvMC2zx4SIpgUjKBWslYu2vascS9u+7kt3ieft1fvT8OPLL74/ufNJj2t0EqIEJa/WtWc
|
|
TMfFwDLBQ8jI5Xs6BaP+tDcbj4bD8YD4yIVUWzBCHuv2qN3rD8btKOmy3nDvroJRugKjn4SI6DCa
|
|
EOH79yMw6rVWSW8MFQz4JyKUwWowgqwqU9XS12gBQPq6WUa+sZa4VIdghZLWCsd79DAHynpVWive
|
|
Vu7zPLvdngyfR0/bbzNW5evqu76SiW8v7uKhKE+MXgxFG2MCc0Tw0vGrVz6U++9C+RqCPZHWGp9v
|
|
5hLh0gtghAN/sne8dJqDceS6Fpnx0grpq40uOVqcrcLBDhwGhFXSq+dbaiqZa47/fygE/VcPmNOL
|
|
yvyrfh1syGMZllVhkAYPEa8cWwMjVHWIYvz/hGhhEsPmli8hlsHFWtThT/sKjAaTUUNghQAh98cF
|
|
/DQCQPMG03lyShaZCTZEZnyuy1gaCzmALIqsNxstVaamcyT/iQED
|
|
headers:
|
|
CF-Cache-Status:
|
|
- DYNAMIC
|
|
CF-RAY:
|
|
- 85915bcfcd52a537-GRU
|
|
Cache-Control:
|
|
- no-cache, must-revalidate
|
|
Connection:
|
|
- keep-alive
|
|
Content-Encoding:
|
|
- br
|
|
Content-Type:
|
|
- application/json
|
|
Date:
|
|
- Wed, 21 Feb 2024 19:22:34 GMT
|
|
Server:
|
|
- cloudflare
|
|
Transfer-Encoding:
|
|
- chunked
|
|
access-control-allow-origin:
|
|
- '*'
|
|
alt-svc:
|
|
- h3=":443"; ma=86400
|
|
openai-model:
|
|
- gpt-4-0125-preview
|
|
openai-organization:
|
|
- crewai-iuxna1
|
|
openai-processing-ms:
|
|
- '1629'
|
|
openai-version:
|
|
- '2020-10-01'
|
|
strict-transport-security:
|
|
- max-age=15724800; includeSubDomains
|
|
x-ratelimit-limit-requests:
|
|
- '10000'
|
|
x-ratelimit-limit-tokens:
|
|
- '800000'
|
|
x-ratelimit-remaining-requests:
|
|
- '9999'
|
|
x-ratelimit-remaining-tokens:
|
|
- '799825'
|
|
x-ratelimit-reset-requests:
|
|
- 6ms
|
|
x-ratelimit-reset-tokens:
|
|
- 13ms
|
|
x-request-id:
|
|
- req_fdf979d873b6bedc600dbe2d9a119f05
|
|
status:
|
|
code: 200
|
|
message: OK
|
|
- request:
|
|
body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
|
|
personal goal is: test goalYou have access to ONLY the following tools, use
|
|
one at time:\n\nget_final_answer: get_final_answer(anything: str) -> float -
|
|
Get the final answer but don''t give it yet, just re-use this\n tool
|
|
non-stop.\n\nTo use a tool you MUST use the exact following format:\n\n```\nUse
|
|
Tool: the tool you wanna use, should be one of [get_final_answer] and absolute
|
|
all relevant input and context for using the tool, you must use only one tool
|
|
at once.\nResult: [result of the tool]\n```\n\nTo give your final answer use
|
|
the exact following format:\n\n```\nFinal Answer: [THE MOST COMPLETE ANSWE WITH
|
|
ALL CONTEXT, DO NOT LEAVE ANYTHING OUT]\n```\nI MUST use these formats, my jobs
|
|
depends on it!This is the summary of your work so far:\n\n\nCurrent Task: The
|
|
final answer is 42. But don''t give it until I tell you so, instead keep using
|
|
the `get_final_answer` tool.\n\n Begin! This is VERY important to you, your
|
|
job depends on it!\n\n\nUse Tool: get_final_answer\nResult: 42\n\nIf you don''t
|
|
need to use any more tools, make sure use the correct format to give your final
|
|
answer:\n\n```Final Answer: [entire content of your most complete final answer
|
|
goes here]```\n You MUST use these formats, your jobs depends on it!\n"}], "model":
|
|
"gpt-4-0125-preview", "n": 1, "stop": ["\nResult"], "stream": true, "temperature":
|
|
0.7}'
|
|
headers:
|
|
accept:
|
|
- application/json
|
|
accept-encoding:
|
|
- gzip, deflate, br
|
|
connection:
|
|
- keep-alive
|
|
content-length:
|
|
- '1430'
|
|
content-type:
|
|
- application/json
|
|
cookie:
|
|
- __cf_bm=yNWN6xxJzIde4UgAgONRHE3BZJmmCPGu7Ch2r8cft9A-1708543351-1.0-ASCEMu/lL3CxJvBcFTc1tUBtRrna4KuvHkF7gIGXFGT7IzdbHGcbdzKpMclXH0YRkPX7kIeGPEhgHxYfptOxLlM=;
|
|
_cfuvid=rc0f50rp.We1R7UrhqtjVUfOr0VA90lt_6hQ5eO2240-1708543351878-0.0-604800000
|
|
host:
|
|
- api.openai.com
|
|
user-agent:
|
|
- OpenAI/Python 1.12.0
|
|
x-stainless-arch:
|
|
- arm64
|
|
x-stainless-async:
|
|
- 'false'
|
|
x-stainless-lang:
|
|
- python
|
|
x-stainless-os:
|
|
- MacOS
|
|
x-stainless-package-version:
|
|
- 1.12.0
|
|
x-stainless-runtime:
|
|
- CPython
|
|
x-stainless-runtime-version:
|
|
- 3.11.7
|
|
method: POST
|
|
uri: https://api.openai.com/v1/chat/completions
|
|
response:
|
|
body:
|
|
string: 'data: {"id":"chatcmpl-8umSotD3VfmGvWS89RCR98d94YN56","object":"chat.completion.chunk","created":1708543354,"model":"gpt-4-0125-preview","system_fingerprint":"fp_d0c0cfacb3","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSotD3VfmGvWS89RCR98d94YN56","object":"chat.completion.chunk","created":1708543354,"model":"gpt-4-0125-preview","system_fingerprint":"fp_d0c0cfacb3","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSotD3VfmGvWS89RCR98d94YN56","object":"chat.completion.chunk","created":1708543354,"model":"gpt-4-0125-preview","system_fingerprint":"fp_d0c0cfacb3","choices":[{"index":0,"delta":{"content":"
|
|
Answer"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSotD3VfmGvWS89RCR98d94YN56","object":"chat.completion.chunk","created":1708543354,"model":"gpt-4-0125-preview","system_fingerprint":"fp_d0c0cfacb3","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSotD3VfmGvWS89RCR98d94YN56","object":"chat.completion.chunk","created":1708543354,"model":"gpt-4-0125-preview","system_fingerprint":"fp_d0c0cfacb3","choices":[{"index":0,"delta":{"content":"
|
|
"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSotD3VfmGvWS89RCR98d94YN56","object":"chat.completion.chunk","created":1708543354,"model":"gpt-4-0125-preview","system_fingerprint":"fp_d0c0cfacb3","choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}
|
|
|
|
|
|
data: {"id":"chatcmpl-8umSotD3VfmGvWS89RCR98d94YN56","object":"chat.completion.chunk","created":1708543354,"model":"gpt-4-0125-preview","system_fingerprint":"fp_d0c0cfacb3","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
|
|
|
|
|
data: [DONE]
|
|
|
|
|
|
'
|
|
headers:
|
|
CF-Cache-Status:
|
|
- DYNAMIC
|
|
CF-RAY:
|
|
- 85915bdb9be6a537-GRU
|
|
Cache-Control:
|
|
- no-cache, must-revalidate
|
|
Connection:
|
|
- keep-alive
|
|
Content-Type:
|
|
- text/event-stream
|
|
Date:
|
|
- Wed, 21 Feb 2024 19:22:34 GMT
|
|
Server:
|
|
- cloudflare
|
|
Transfer-Encoding:
|
|
- chunked
|
|
access-control-allow-origin:
|
|
- '*'
|
|
alt-svc:
|
|
- h3=":443"; ma=86400
|
|
openai-model:
|
|
- gpt-4-0125-preview
|
|
openai-organization:
|
|
- crewai-iuxna1
|
|
openai-processing-ms:
|
|
- '475'
|
|
openai-version:
|
|
- '2020-10-01'
|
|
strict-transport-security:
|
|
- max-age=15724800; includeSubDomains
|
|
x-ratelimit-limit-requests:
|
|
- '10000'
|
|
x-ratelimit-limit-tokens:
|
|
- '800000'
|
|
x-ratelimit-remaining-requests:
|
|
- '9999'
|
|
x-ratelimit-remaining-tokens:
|
|
- '799670'
|
|
x-ratelimit-reset-requests:
|
|
- 6ms
|
|
x-ratelimit-reset-tokens:
|
|
- 24ms
|
|
x-request-id:
|
|
- req_d2c8ba56102d51a77a7088ed56eaf8c5
|
|
status:
|
|
code: 200
|
|
message: OK
|
|
- request:
|
|
body: '{"messages": [{"role": "user", "content": "Progressively summarize the
|
|
lines of conversation provided, adding onto the previous summary returning a
|
|
new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
|
|
of artificial intelligence. The AI thinks artificial intelligence is a force
|
|
for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
|
|
intelligence is a force for good?\nAI: Because artificial intelligence will
|
|
help humans reach their full potential.\n\nNew summary:\nThe human asks what
|
|
the AI thinks of artificial intelligence. The AI thinks artificial intelligence
|
|
is a force for good because it will help humans reach their full potential.\nEND
|
|
OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: The final
|
|
answer is 42. But don''t give it until I tell you so, instead keep using the
|
|
`get_final_answer` tool.\nAI: 42\n\nNew summary:"}], "model": "gpt-4-0125-preview",
|
|
"n": 1, "stream": false, "temperature": 0.7}'
|
|
headers:
|
|
accept:
|
|
- application/json
|
|
accept-encoding:
|
|
- gzip, deflate, br
|
|
connection:
|
|
- keep-alive
|
|
content-length:
|
|
- '977'
|
|
content-type:
|
|
- application/json
|
|
cookie:
|
|
- __cf_bm=yNWN6xxJzIde4UgAgONRHE3BZJmmCPGu7Ch2r8cft9A-1708543351-1.0-ASCEMu/lL3CxJvBcFTc1tUBtRrna4KuvHkF7gIGXFGT7IzdbHGcbdzKpMclXH0YRkPX7kIeGPEhgHxYfptOxLlM=;
|
|
_cfuvid=rc0f50rp.We1R7UrhqtjVUfOr0VA90lt_6hQ5eO2240-1708543351878-0.0-604800000
|
|
host:
|
|
- api.openai.com
|
|
user-agent:
|
|
- OpenAI/Python 1.12.0
|
|
x-stainless-arch:
|
|
- arm64
|
|
x-stainless-async:
|
|
- 'false'
|
|
x-stainless-lang:
|
|
- python
|
|
x-stainless-os:
|
|
- MacOS
|
|
x-stainless-package-version:
|
|
- 1.12.0
|
|
x-stainless-runtime:
|
|
- CPython
|
|
x-stainless-runtime-version:
|
|
- 3.11.7
|
|
method: POST
|
|
uri: https://api.openai.com/v1/chat/completions
|
|
response:
|
|
body:
|
|
string: !!binary |
|
|
IeAJACB+6vR3L9PTzSGteIsy/V6pBkw7C5ukH94CGShuhxt9PTrq+fS3C7MAp4FHvyUYeGC9TXz6
|
|
g3Bnt6t3TwIgnZAExXno4tpU3rKrX8xmf9xeH+5e3vT3k12v3PXzZM2zzSf1BEAcFSp2nLwfYhBq
|
|
bkggc/meSkhitBguZ9PJZDZDPqo5URVJUGacN/WGo/HMy5Iu1Y57N2cdK0sSPwIATqcZQPPvpyQx
|
|
7EEhGkOTJPwJoJYrRRIUWqutCxtHPYJe+bolCXrNFQZFHDRSiu3AjzvrKzjGTrs85yoBf0p1E1YI
|
|
G7tTbd+tjraYjnPqR4zOKgFRkCnnsxC/EsAxV2pcFSZ9FC9k6ia6ngAhdao6IOqFmgLbTsd9uoO4
|
|
XP6u4sy0HFmSaLqqwkFhOeQ38VojCbKOjYjoIoA/J487vx2Qabk2zndcqsa+GZWJ6F17FHI6Wr1v
|
|
GOjh7Hg0ElYKyvW/4qe6yVRrWu3z7dT46XA5jeI0XqxIXIQBAw==
|
|
headers:
|
|
CF-Cache-Status:
|
|
- DYNAMIC
|
|
CF-RAY:
|
|
- 85915be1aca3a537-GRU
|
|
Cache-Control:
|
|
- no-cache, must-revalidate
|
|
Connection:
|
|
- keep-alive
|
|
Content-Encoding:
|
|
- br
|
|
Content-Type:
|
|
- application/json
|
|
Date:
|
|
- Wed, 21 Feb 2024 19:22:37 GMT
|
|
Server:
|
|
- cloudflare
|
|
Transfer-Encoding:
|
|
- chunked
|
|
access-control-allow-origin:
|
|
- '*'
|
|
alt-svc:
|
|
- h3=":443"; ma=86400
|
|
openai-model:
|
|
- gpt-4-0125-preview
|
|
openai-organization:
|
|
- crewai-iuxna1
|
|
openai-processing-ms:
|
|
- '2510'
|
|
openai-version:
|
|
- '2020-10-01'
|
|
strict-transport-security:
|
|
- max-age=15724800; includeSubDomains
|
|
x-ratelimit-limit-requests:
|
|
- '10000'
|
|
x-ratelimit-limit-tokens:
|
|
- '800000'
|
|
x-ratelimit-remaining-requests:
|
|
- '9999'
|
|
x-ratelimit-remaining-tokens:
|
|
- '799774'
|
|
x-ratelimit-reset-requests:
|
|
- 6ms
|
|
x-ratelimit-reset-tokens:
|
|
- 16ms
|
|
x-request-id:
|
|
- req_5850b110143112eb39fed9077edbe8f8
|
|
status:
|
|
code: 200
|
|
message: OK
|
|
version: 1
|