Merge branch 'main' of github.com:joaomdmoura/crewAI into test-hierarchical-tools-proper-setup

Lorenze Jay
2024-07-17 08:16:44 -07:00
15 changed files with 9325 additions and 946 deletions

View File

@@ -0,0 +1,151 @@
interactions:
- request:
body: '{"messages": [{"content": "You are Researcher. You''re an expert researcher,
specialized in technology, software engineering, AI and startups. You work as
a freelancer and is now working on doing research and analysis for a new customer.\nYour
personal goal is: Make the best research and analysis on content about AI and
AI agentsTo give my best complete final answer to the task use the exact following
format:\n\nThought: I now can give a great answer\nFinal Answer: my best complete
final answer to the task.\nYour final answer must be the great and the most
complete as possible, it must be outcome described.\n\nI MUST use these formats,
my job depends on it!\nCurrent Task: Say Hi\n\nThis is the expect criteria for
your final answer: Hi \n you MUST return the actual complete content as the
final answer, not a summary.\n\nBegin! This is VERY important to you, use the
tools available and give your best Final Answer, your job depends on it!\n\nThought:\n",
"role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"], "stream":
true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '1072'
content-type:
- application/json
cookie:
- _cfuvid=Nxp5RE8CN2EyHxMIvB_SaTizIH5w0eWt9SPilMuIjMk-1721227661802-0.0.1.1-604800000;
__cf_bm=jadAYV2gh7qPDzgKO9A4JzJTaI9c2fnnjxloIQZeOIw-1721227661-1.0.1.1-apaA8kQyGiEV3kOuXHe8z1zeyvxd_jBHCQpdqWirUlylrUo.uRZjRDueI.sSXS4hXoWkyIW6kIMt7lamQM2mdw
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.35.10
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.35.10
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.3
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
now"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
can"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
great"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":".\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
Hi"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9m0FB4Oy6X0apYX2wcQ30rTBkmtxT","object":"chat.completion.chunk","created":1721227709,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8a4b087e7d024593-ATL
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Wed, 17 Jul 2024 14:48:29 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '142'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999753'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_75100793afc289eaf8b56127e1cc0532
status:
code: 200
message: OK
version: 1
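
The cassette above is a recorded OpenAI chat-completions exchange that the test suite replays offline instead of calling the live API. A minimal sketch of how such a recording is typically consumed, assuming pytest-recording/vcrpy as implied by the @pytest.mark.vcr(filter_headers=["authorization"]) decorators in the test diffs below; the agent and task values are illustrative placeholders, not the exact fixtures from this commit:

import pytest

from crewai.agent import Agent
from crewai.crew import Crew
from crewai.task import Task


@pytest.mark.vcr(filter_headers=["authorization"])
def test_say_hi_replays_recorded_cassette():
    # A matching cassette (YAML like the one above) is loaded for this test, so
    # no network access or real API key is needed when it runs.
    researcher = Agent(
        role="Researcher",
        goal="Make the best research and analysis on content about AI and AI agents",
        backstory="Expert researcher specialized in technology, software engineering, AI and startups.",
    )
    task = Task(description="Say Hi", expected_output="Hi", agent=researcher)
    crew = Crew(agents=[researcher], tasks=[task])

    result = crew.kickoff()
    assert result.raw == "Hi"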

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,262 @@
interactions:
- request:
body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
final answer to the task use the exact following format:\n\nThought: I now can
give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
final answer must be the great and the most complete as possible, it must be
outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
Task: Give me an integer score between 1-5 for the following title: ''The impact
of AI in the future of work''\n\nThis is the expect criteria for your final
answer: The score of the title. \n you MUST return the actual complete content
as the final answer, not a summary.\n\nBegin! This is VERY important to you,
use the tools available and give your best Final Answer, your job depends on
it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '997'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.35.14
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.35.14
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
now"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
can"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
great"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltULlJQMDDjI2f7PpkjJ7DsxWjEQ","object":"chat.completion.chunk","created":1721201741,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_400f27fa1f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8a488e81aa7f0c7e-EWR
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Wed, 17 Jul 2024 07:35:41 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=Ypr9N3lq.OD8hpimnkpN61rAsWyk216I8Tq7RA8.uwQ-1721201741-1.0.1.1-6Cj4aX9I96QHMmPwJBpO1iCFOJsvzq_agUIrl3XS.YhlPuGyA4K9sDONExvLn.cDe3W_p_1ET7Pt_hxjtHPAXQ;
path=/; expires=Wed, 17-Jul-24 08:05:41 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=NLa1BaUsRvD7shojIzUH9YSRXQIEzaoJVcq2_gNwqm0-1721201741646-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '106'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999771'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_e52461a1ab2702e360f6303fbcb4cc3c
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
"I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4o", "tool_choice":
{"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
"function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
`ScoreOutput` with all the required parameters with correct types", "parameters":
{"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
["score"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '519'
content-type:
- application/json
cookie:
- __cf_bm=Ypr9N3lq.OD8hpimnkpN61rAsWyk216I8Tq7RA8.uwQ-1721201741-1.0.1.1-6Cj4aX9I96QHMmPwJBpO1iCFOJsvzq_agUIrl3XS.YhlPuGyA4K9sDONExvLn.cDe3W_p_1ET7Pt_hxjtHPAXQ;
_cfuvid=NLa1BaUsRvD7shojIzUH9YSRXQIEzaoJVcq2_gNwqm0-1721201741646-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.35.14
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.35.14
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAA2xS22rjMBB991eIeY6LHTubrR9LKYUttKWUvbUYRRnb6sqSVhqTJiH/vshxYzes
H8QwZ86FGe8jxkCuoWAgGk6itSq+VPR8d8O/vXXZbiOzx13S5D9/PNwSv8nvYBYYZvWGgj5YF8K0
ViFJo4+wcMgJg2q6nKfzJF3maQ+0Zo0q0GpLcW7ieTLP42QRp9lAbIwU6KFgvyPGGNv3b4io1/gO
BUtmH50Wvec1QnEaYgycUaED3HvpiWuC2QgKowl1SK07pSYAGaNKwZUajY/fflKPe+JKlVe/nqvV
98vNtslul3/Xj7vrzaZ6StOJ31F6a/tAVafFaT8T/NQvzswYA83bnvskjMP7jmxHZ3TGgLu6a1FT
iA77F/Bh+AWK/ACfRg/R/+rXoTqc1qpMbZ1Z+bMtQSW19E3pkPs+LXgy9mgR5F7783WfLgLWmdZS
SeYP6iD4dbgejP/LCC4GjAxxNeEsoiEe+K0nbMtK6hqddbI/JVS2FDkuVl8qnqUQHaJ/AAAA//8D
ACb4o2zTAgAA
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8a488e858ca50c7e-EWR
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Wed, 17 Jul 2024 07:35:42 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '241'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999968'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_01c2f40fe9c73f883b7ed5b60c8067e5
status:
code: 200
message: OK
version: 1
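
The second request in this cassette forces a ScoreOutput function tool so that the model's raw answer "4" comes back as structured JSON. A minimal sketch of the Pydantic model implied by the recorded tool schema and by the ScoreOutput class in the test diff further down; the direct construction below is illustrative, not the code the tests actually run:

from pydantic import BaseModel


class ScoreOutput(BaseModel):
    # Mirrors the recorded tool schema:
    # {"properties": {"score": {"title": "Score", "type": "integer"}}, "required": ["score"]}
    score: int


# The structured result the hierarchical output tests later assert on.
parsed = ScoreOutput(score=int("4"))
assert parsed.model_dump() == {"score": 4}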

File diff suppressed because it is too large

View File

@@ -0,0 +1,159 @@
interactions:
- request:
body: '{"messages": [{"content": "You are test_agent. Test Description\nYour personal
goal is: Test GoalTo give my best complete final answer to the task use the
exact following format:\n\nThought: I now can give a great answer\nFinal Answer:
my best complete final answer to the task.\nYour final answer must be the great
and the most complete as possible, it must be outcome described.\n\nI MUST use
these formats, my job depends on it!\nCurrent Task: Test Task\n\nThis is the
expect criteria for your final answer: Say Hi \n you MUST return the actual
complete content as the final answer, not a summary.\n\nThis is the context
you''re working with:\ncontext raw output\n\nBegin! This is VERY important to
you, use the tools available and give your best Final Answer, your job depends
on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop":
["\nObservation"], "stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '910'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.35.14
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.35.14
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
now"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
can"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
great"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{"content":"
Hi"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9ltUKsvuhS55Ng70kOmxKNQcksxm4","object":"chat.completion.chunk","created":1721201740,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_c4e5b6fa31","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 8a488e7859304288-EWR
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Wed, 17 Jul 2024 07:35:40 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=tBullt04HIYduv1QEpAn86_ghx9PQ4DMe9LwLp7.bu4-1721201740-1.0.1.1-sGn3OYi9ntPGIzwuC4RH0UTUQFVy.BUFIvy9v9IrjorYeAsecsuuuREs7b19i4dXygWnZZEkH6cxvnQeJ62g7g;
path=/; expires=Wed, 17-Jul-24 08:05:40 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=uSSEccAecytxcWKGJoaQXLVBAwHLrZ.QckpcK..rN48-1721201740355-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
X-Content-Type-Options:
- nosniff
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '83'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=15552000; includeSubDomains; preload
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '30000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '29999793'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_89b4947eaf51788bd7a69d6cc0da6c08
status:
code: 200
message: OK
version: 1

View File

@@ -8,15 +8,14 @@ from unittest.mock import MagicMock, patch
import pydantic_core
import pytest
from crewai.agent import Agent
from crewai.agents.cache import CacheHandler
from crewai.tasks.conditional_task import ConditionalTask
from crewai.crew import Crew
from crewai.crews.crew_output import CrewOutput
from crewai.memory.contextual.contextual_memory import ContextualMemory
from crewai.process import Process
from crewai.task import Task
from crewai.tasks.conditional_task import ConditionalTask
from crewai.tasks.output_format import OutputFormat
from crewai.tasks.task_output import TaskOutput
from crewai.utilities import Logger, RPMController
@@ -991,9 +990,7 @@ async def test_kickoff_async_basic_functionality_and_output():
@pytest.mark.vcr(filter_headers=["authorization"])
@pytest.mark.asyncio
async def test_async_kickoff_for_each_async_basic_functionality_and_output():
"""Tests the basic functionality and output of akickoff_for_each_async."""
from unittest.mock import patch
"""Tests the basic functionality and output of kickoff_for_each_async."""
inputs = [
{"topic": "dog"},
{"topic": "cat"},
@@ -1019,8 +1016,13 @@ async def test_async_kickoff_for_each_async_basic_functionality_and_output():
agent=agent,
)
async def mock_kickoff_async(**kwargs):
input_data = kwargs.get("inputs")
index = [input_["topic"] for input_ in inputs].index(input_data["topic"])
return expected_outputs[index]
with patch.object(
Crew, "kickoff_async", side_effect=expected_outputs
Crew, "kickoff_async", side_effect=mock_kickoff_async
) as mock_kickoff_async:
crew = Crew(agents=[agent], tasks=[task])
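
The hunk above swaps a positional side_effect list for an input-aware coroutine, so each mocked kickoff_async call returns the output that matches its inputs even when the async calls finish out of order. A minimal, self-contained sketch of that lookup pattern with placeholder inputs and outputs (not the exact fixtures from this test); in the test itself the coroutine is installed via patch.object(Crew, "kickoff_async", side_effect=mock_kickoff_async) as shown above:

import asyncio

inputs = [{"topic": "dog"}, {"topic": "cat"}, {"topic": "apple"}]
expected_outputs = ["dog output", "cat output", "apple output"]  # placeholders


async def mock_kickoff_async(**kwargs):
    # Pick the expected output by matching the topic of the inputs that were
    # passed in, rather than relying on the order in which calls complete.
    input_data = kwargs.get("inputs")
    index = [input_["topic"] for input_ in inputs].index(input_data["topic"])
    return expected_outputs[index]


async def main():
    # kickoff_for_each_async fans out one kickoff_async call per input; with the
    # input-aware side_effect the results line up with the inputs regardless of
    # completion order.
    results = await asyncio.gather(*(mock_kickoff_async(inputs=i) for i in inputs))
    assert results == expected_outputs


asyncio.run(main())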
@@ -1429,6 +1431,7 @@ def test_crew_inputs_interpolate_both_agents_and_tasks_diff():
interpolate_task_inputs.assert_called()
@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_does_not_interpolate_without_inputs():
from unittest.mock import patch
@@ -2071,6 +2074,7 @@ def test_replay_task_with_context():
db_handler.reset()
@pytest.mark.vcr(filter_headers=["authorization"])
def test_replay_from_task_with_context():
agent = Agent(role="test_agent", backstory="Test Description", goal="Test Goal")
task1 = Task(
@@ -2188,6 +2192,7 @@ def test_replay_with_invalid_task_id():
crew.replay_from_task("bf5b09c9-69bd-4eb8-be12-f9e5bae31c2d")
@pytest.mark.vcr(filter_headers=["authorization"])
@patch.object(Crew, "_interpolate_inputs")
def test_replay_interpolates_inputs_properly(mock_interpolate_inputs):
agent = Agent(role="test_agent", backstory="Test Description", goal="Test Goal")
@@ -2354,10 +2359,8 @@ def test_conditional_task_requirement_breaks_when_singular_conditional_task():
@pytest.mark.vcr(filter_headers=["authorization"])
def test_conditional_task_last_task():
def test_conditional_task_last_task_when_conditional_is_true():
def condition_fn(output) -> bool:
if output.raw == "Hi":
return False
return True
task1 = Task(
@@ -2377,6 +2380,34 @@ def test_conditional_task_last_task():
tasks=[task1, task2],
)
result = crew.kickoff()
assert result.raw.startswith(
"1. **The Rise of AI Agents in Customer Service: Revolutionizing Customer Interactions**"
)
@pytest.mark.vcr(filter_headers=["authorization"])
def test_conditional_task_last_task_when_conditional_is_false():
def condition_fn(output) -> bool:
return False
task1 = Task(
description="Say Hi",
expected_output="Hi",
agent=researcher,
)
task2 = ConditionalTask(
description="Come up with a list of 5 interesting ideas to explore for an article, then write one amazing paragraph highlight for each idea that showcases how good an article about this topic could be. Return the list of ideas with their paragraph and your notes.",
expected_output="5 bullet points with a paragraph for each idea.",
condition=condition_fn,
agent=writer,
)
crew = Crew(
agents=[researcher, writer],
tasks=[task1, task2],
)
result = crew.kickoff()
print(result.raw)
assert result.raw == "Hi"

View File

@@ -318,6 +318,7 @@ def test_output_json_hierarchical():
assert result.to_dict() == {"score": 4}
@pytest.mark.vcr(filter_headers=["authorization"])
def test_json_property_without_output_json():
class ScoreOutput(BaseModel):
score: int
@@ -398,8 +399,8 @@ def test_output_json_dict_hierarchical():
manager_llm=ChatOpenAI(model="gpt-4o"),
)
result = crew.kickoff()
assert {"score": 4} == result.json_dict
assert result.to_dict() == {"score": 4}
assert {"score": 5} == result.json_dict
assert result.to_dict() == {"score": 5}
@pytest.mark.vcr(filter_headers=["authorization"])