interactions:
- request:
    body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
      personal goal is: test goal\n\nTOOLS:\n------\nYou have access to the following
      tools:\n\nmultiplier: multiplier(numbers) -> float - Useful for when you need
      to multiply two numbers together. \n\t\t\tThe input to this tool should be a
      comma separated list of numbers of \n\t\t\tlength two, representing the two
      numbers you want to multiply together. \n\t\t\tFor example, `1,2` would be the
      input if you wanted to multiply 1 by 2.\n\nTo use a tool, please use the exact
      following format:\n\n```\nThought: Do I need to use a tool? Yes\nAction: the
      action to take, should be one of [multiplier]\nAction Input: the input to the
      action\nObservation: the result of the action\n```\n\nWhen you have a response
      for your task, or if you do not need to use a tool, you MUST use the format:\n\n```\nThought:
      Do I need to use a tool? No\nFinal Answer: [your response here]\n```\n\t\tThis
      is the summary of your work so far:\n \nBegin! This is VERY important to
      you, your job depends on it!\n\nCurrent Task: What is 3 times 4\n\n"}], "model":
      "gpt-4", "n": 1, "stop": ["\nObservation"], "stream": false, "temperature":
      0.7}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '1199'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.6.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.6.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    content: "{\n \"id\": \"chatcmpl-8XuE6OrjKKco53f0TIqPQEE3nWeNj\",\n \"object\":
      \"chat.completion\",\n \"created\": 1703091650,\n \"model\": \"gpt-4-0613\",\n
      \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
      \"assistant\",\n \"content\": \"Thought: Do I need to use a tool? Yes\\nAction:
      multiplier\\nAction Input: 3,4\\n\"\n },\n \"logprobs\": null,\n \"finish_reason\":
      \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 260,\n \"completion_tokens\":
      24,\n \"total_tokens\": 284\n },\n \"system_fingerprint\": null\n}\n"
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 8389719d5cba0110-GRU
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Wed, 20 Dec 2023 17:00:52 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - __cf_bm=rIaStuxTRr_ZC91uSFg5cthTUq95O6PBdkxXZ68fLYc-1703091652-1-AZu+nvbL+3bwwQOIKpnYgLf5m5Mp0jfQ2baAlDRl1+FiTPO+/+GjcF4Upw4M8vtfh39ZyWF+l68r83qCS9OpObU=;
        path=/; expires=Wed, 20-Dec-23 17:30:52 GMT; domain=.api.openai.com; HttpOnly;
        Secure; SameSite=None
      - _cfuvid=Ajd5lPskQSkBImLJdkywZGG4vHMkMCBcxb8TonP9OKc-1703091652762-0-604800000;
        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - user-z7g4wmlazxqvc5wjyaaaocfz
      openai-processing-ms:
      - '2423'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-limit-tokens_usage_based:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299729'
      x-ratelimit-remaining-tokens_usage_based:
      - '299729'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 54ms
      x-ratelimit-reset-tokens_usage_based:
      - 54ms
      x-request-id:
      - 8f99f43731fa878eaf0fcbf0719d1b3f
    http_version: HTTP/1.1
    status_code: 200
- request:
    body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
      personal goal is: test goal\n\nTOOLS:\n------\nYou have access to the following
      tools:\n\nmultiplier: multiplier(numbers) -> float - Useful for when you need
      to multiply two numbers together. \n\t\t\tThe input to this tool should be a
      comma separated list of numbers of \n\t\t\tlength two, representing the two
      numbers you want to multiply together. \n\t\t\tFor example, `1,2` would be the
      input if you wanted to multiply 1 by 2.\n\nTo use a tool, please use the exact
      following format:\n\n```\nThought: Do I need to use a tool? Yes\nAction: the
      action to take, should be one of [multiplier]\nAction Input: the input to the
      action\nObservation: the result of the action\n```\n\nWhen you have a response
      for your task, or if you do not need to use a tool, you MUST use the format:\n\n```\nThought:
      Do I need to use a tool? No\nFinal Answer: [your response here]\n```\n\t\tThis
      is the summary of your work so far:\n \nBegin! This is VERY important to
      you, your job depends on it!\n\nCurrent Task: What is 3 times 4\nThought: Do
      I need to use a tool? Yes\nAction: multiplier\nAction Input: 3,4\n\nObservation:
      12\nThought: \n"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream":
      false, "temperature": 0.7}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '1305'
      content-type:
      - application/json
      cookie:
      - __cf_bm=rIaStuxTRr_ZC91uSFg5cthTUq95O6PBdkxXZ68fLYc-1703091652-1-AZu+nvbL+3bwwQOIKpnYgLf5m5Mp0jfQ2baAlDRl1+FiTPO+/+GjcF4Upw4M8vtfh39ZyWF+l68r83qCS9OpObU=;
        _cfuvid=Ajd5lPskQSkBImLJdkywZGG4vHMkMCBcxb8TonP9OKc-1703091652762-0-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.6.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.6.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    content: "{\n \"id\": \"chatcmpl-8XuE86Ud94rsOP7VA4Bgxo6RM5XE6\",\n \"object\":
      \"chat.completion\",\n \"created\": 1703091652,\n \"model\": \"gpt-4-0613\",\n
      \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
      \"assistant\",\n \"content\": \"Do I need to use a tool? No\\nFinal Answer:
      3 times 4 is 12.\"\n },\n \"logprobs\": null,\n \"finish_reason\":
      \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\": 293,\n \"completion_tokens\":
      22,\n \"total_tokens\": 315\n },\n \"system_fingerprint\": null\n}\n"
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 838971ae0f120110-GRU
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Wed, 20 Dec 2023 17:00:55 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - user-z7g4wmlazxqvc5wjyaaaocfz
      openai-processing-ms:
      - '2334'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-limit-tokens_usage_based:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299703'
      x-ratelimit-remaining-tokens_usage_based:
      - '299703'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 59ms
      x-ratelimit-reset-tokens_usage_based:
      - 59ms
      x-request-id:
      - 44304b182424a8acad8a3121817bea58
    http_version: HTTP/1.1
    status_code: 200
- request:
    body: '{"messages": [{"role": "user", "content": "Progressively summarize the
      lines of conversation provided, adding onto the previous summary returning a
      new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
      of artificial intelligence. The AI thinks artificial intelligence is a force
      for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
      intelligence is a force for good?\nAI: Because artificial intelligence will
      help humans reach their full potential.\n\nNew summary:\nThe human asks what
      the AI thinks of artificial intelligence. The AI thinks artificial intelligence
      is a force for good because it will help humans reach their full potential.\nEND
      OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: What
      is 3 times 4\nAI: 3 times 4 is 12.\n\nNew summary:"}], "model": "gpt-4", "n":
      1, "stream": false, "temperature": 0.7}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '885'
      content-type:
      - application/json
      cookie:
      - __cf_bm=rIaStuxTRr_ZC91uSFg5cthTUq95O6PBdkxXZ68fLYc-1703091652-1-AZu+nvbL+3bwwQOIKpnYgLf5m5Mp0jfQ2baAlDRl1+FiTPO+/+GjcF4Upw4M8vtfh39ZyWF+l68r83qCS9OpObU=;
        _cfuvid=Ajd5lPskQSkBImLJdkywZGG4vHMkMCBcxb8TonP9OKc-1703091652762-0-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.6.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.6.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    content: "{\n \"id\": \"chatcmpl-8XuEBWLslSBUMDHZRViwpoXcclYOk\",\n \"object\":
      \"chat.completion\",\n \"created\": 1703091655,\n \"model\": \"gpt-4-0613\",\n
      \ \"choices\": [\n {\n \"index\": 0,\n \"message\": {\n \"role\":
      \"assistant\",\n \"content\": \"The human asks the AI what is 3 times
      4, and the AI responds that 3 times 4 is 12.\"\n },\n \"logprobs\":
      null,\n \"finish_reason\": \"stop\"\n }\n ],\n \"usage\": {\n \"prompt_tokens\":
      155,\n \"completion_tokens\": 27,\n \"total_tokens\": 182\n },\n \"system_fingerprint\":
      null\n}\n"
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 838971be49500110-GRU
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Wed, 20 Dec 2023 17:00:58 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - user-z7g4wmlazxqvc5wjyaaaocfz
      openai-processing-ms:
      - '2942'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-limit-tokens_usage_based:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299794'
      x-ratelimit-remaining-tokens_usage_based:
      - '299794'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 41ms
      x-ratelimit-reset-tokens_usage_based:
      - 41ms
      x-request-id:
      - 546e9b3713f3ff2d7f9868133efaa3a7
    http_version: HTTP/1.1
    status_code: 200
version: 1