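# VCR-style cassette (vcrpy `version: 1` format) recording three OpenAI
# /v1/chat/completions interactions for replay in crewAI's tests:
#   1. a streamed agent step in which the model decides to call the `get_final_answer` tool,
#   2. a function-call request that extracts `InstructorToolCalling` arguments for that tool,
#   3. a non-streamed conversation-summarization request.
# The recorded request and response payloads below are kept byte-for-byte as captured.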
interactions:
- request:
    body: '{"messages": [{"role": "user", "content": "You are test role.\ntest backstory\n\nYour
      personal goal is: test goalTOOLS:\n------\nYou have access to only the following
      tools:\n\n(\"get_final_answer: get_final_answer(numbers) -> float - Get the
      final answer but don''t give it yet, just re-use this\\n tool non-stop.\",)\n\nTo
      use a tool, please use the exact following format:\n\n```\nThought: Do I need
      to use a tool? Yes\nAction: the tool you wanna use, should be one of [get_final_answer],
      just the name.\nAction Input: Any and all relevant information input and context
      for using the tool\nObservation: the result of using the tool\n```\n\nWhen you
      have a response for your task, or if you do not need to use a tool, you MUST
      use the format:\n\n```\nThought: Do I need to use a tool? No\nFinal Answer:
      [your response here]```This is the summary of your work so far:\nBegin! This
      is VERY important to you, your job depends on it!\n\nCurrent Task: The final
      answer is 42. But don''t give it yet, instead keep using the `get_final_answer`
      tool.\n"}], "model": "gpt-4", "n": 1, "stop": ["\nObservation"], "stream": true,
      "temperature": 0.7}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '1144'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.12.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.12.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: 'data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        Do"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        I"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        need"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        to"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        use"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        a"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        tool"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"?"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        Yes"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        get"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_final"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"_answer"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        Input"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
        "},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"42"},"logprobs":null,"finish_reason":null}]}


        data: {"id":"chatcmpl-8rhrNESkXDcWpz9wMmVDnG7NCSWdF","object":"chat.completion.chunk","created":1707810673,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}


        data: [DONE]


        '
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 854b7c230d17963f-SJC
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Type:
      - text/event-stream
      Date:
      - Tue, 13 Feb 2024 07:51:13 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - __cf_bm=NFb4H263Krk9Xr5qV1Ptu9blCVbFcyg1S93yd9V3EKs-1707810673-1-AQNacdg58H0w+6ASjroSAKAOJjd/zBe3YTh2wxFl31Po2s5KRxRKeNVpvyuztgWptRmoZ8TY6DYFXv6usPcAFbk=;
        path=/; expires=Tue, 13-Feb-24 08:21:13 GMT; domain=.api.openai.com; HttpOnly;
        Secure; SameSite=None
      - _cfuvid=44lfswKyrmuvCjCVUHHy8KWhx1htUCS9U2auSStgf9Y-1707810673564-0-604800000;
        path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - user-z7g4wmlazxqvc5wjyaaaocfz
      openai-processing-ms:
      - '236'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299737'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 52ms
      x-request-id:
      - req_fa1aac5fc97191a0abae61124cc03583
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "system", "content": "\n The
      schema should have the following structure, only two key:\n -
      tool_name: str\n - arguments: dict (with all
      arguments being passed)\n\n Example:\n {\"tool_name\":
      \"tool_name\", \"arguments\": {\"arg_name1\": \"value\", \"arg_name2\": 2}}\n "},
      {"role": "user", "content": "Tools available:\n\nTool Name: get_final_answer\nTool
      Description: get_final_answer(numbers) -> float - Get the final answer but don''t
      give it yet, just re-use this\n tool non-stop.\nTool Arguments: {''numbers'':
      {}}\n\nReturn a valid schema for the tool, use this text to inform a valid ouput
      schema:\n\nTool Name: get_final_answer\nTool Arguments: 42```"}], "model": "gpt-4",
      "function_call": {"name": "InstructorToolCalling"}, "functions": [{"name": "InstructorToolCalling",
      "description": "Correctly extracted `InstructorToolCalling` with all the required
      parameters with correct types", "parameters": {"properties": {"tool_name": {"description":
      "The name of the tool to be called.", "title": "Tool Name", "type": "string"},
      "arguments": {"description": "A dictinary of arguments to be passed to the tool.",
      "title": "Arguments", "type": "object"}}, "required": ["arguments", "tool_name"],
      "type": "object"}}]}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '1427'
      content-type:
      - application/json
      cookie:
      - __cf_bm=NFb4H263Krk9Xr5qV1Ptu9blCVbFcyg1S93yd9V3EKs-1707810673-1-AQNacdg58H0w+6ASjroSAKAOJjd/zBe3YTh2wxFl31Po2s5KRxRKeNVpvyuztgWptRmoZ8TY6DYFXv6usPcAFbk=;
        _cfuvid=44lfswKyrmuvCjCVUHHy8KWhx1htUCS9U2auSStgf9Y-1707810673564-0-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.12.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.12.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
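  # The response body below is stored as raw bytes (!!binary): the gzip-compressed
  # JSON completion, matching the recorded Content-Encoding: gzip header further down.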
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAA1yRT4/TMBDF7/kUozm3KC3bP8oNcUBcEAgOiwiKXHeamLVnInvCsqry3ZHTbr
        F4sIL3rfRGlnZ5g28fyuNtmy3vp1GAG7Rx4/ydfTNkvPZiL0su5hvPCmi9BisCKM2tcO9LVZRb3V
        rtfm3WfWEldXrO20XSenbJtL7mglwXsqcnnupUjhEkhzkdWo9dXtvuOjDyLkr43RSP5X0eNmLb4t
        4r/iAnnyFRXbRXm3Ks9NqUzlKJVGmWnr6S0hPXLSPHjKAjcGvrq0G1IRU4oxcOhMtpQHCYGcOO9s
        eh4IdhGclVGCP/uM8VmSUcja+pHHYCV0SHnSPGL6zfF6G+eteGn7KIf05ifx5Nilronkk+CsL6n0
        F1CG/J62P7xaKPZRQq+NygNxBq7Xmwsvfx76xt1eTRU1/kbfrIprQ0xPSSnkBbQU++jmxyjG4j8A
        AAD//wMAGPPcaIMCAAA=
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 854b7c31fc7a963f-SJC
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Tue, 13 Feb 2024 07:51:17 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - user-z7g4wmlazxqvc5wjyaaaocfz
      openai-processing-ms:
      - '2301'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299791'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 41ms
      x-request-id:
      - req_a23b1391462f62f6d37b5f2eed7a87bd
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "Progressively summarize the
      lines of conversation provided, adding onto the previous summary returning a
      new summary.\n\nEXAMPLE\nCurrent summary:\nThe human asks what the AI thinks
      of artificial intelligence. The AI thinks artificial intelligence is a force
      for good.\n\nNew lines of conversation:\nHuman: Why do you think artificial
      intelligence is a force for good?\nAI: Because artificial intelligence will
      help humans reach their full potential.\n\nNew summary:\nThe human asks what
      the AI thinks of artificial intelligence. The AI thinks artificial intelligence
      is a force for good because it will help humans reach their full potential.\nEND
      OF EXAMPLE\n\nCurrent summary:\n\n\nNew lines of conversation:\nHuman: The final
      answer is 42. But don''t give it yet, instead keep using the `get_final_answer`
      tool.\nAI: Agent stopped due to iteration limit or time limit.\n\nNew summary:"}],
      "model": "gpt-4", "n": 1, "stream": false, "temperature": 0.7}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate
      connection:
      - keep-alive
      content-length:
      - '997'
      content-type:
      - application/json
      cookie:
      - __cf_bm=NFb4H263Krk9Xr5qV1Ptu9blCVbFcyg1S93yd9V3EKs-1707810673-1-AQNacdg58H0w+6ASjroSAKAOJjd/zBe3YTh2wxFl31Po2s5KRxRKeNVpvyuztgWptRmoZ8TY6DYFXv6usPcAFbk=;
        _cfuvid=44lfswKyrmuvCjCVUHHy8KWhx1htUCS9U2auSStgf9Y-1707810673564-0-604800000
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.12.0
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.12.0
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.11.7
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAA1RRwWobMRC971cMuuRim3XixotvKb2UXgo2LXUpRtZOVmokjaqZpS3B/16kdbz0
        IsR7em+e3rw2AMr1agfKWC0mJL/sss17bPfH9ybsP3Sfz8fD8enr/pf/5r98UouioPNPNPKmWhkK
        yaM4ihNtMmrB4rretttu3T5uu0oE6tEX2ZBkuVm2j+uHq8KSM8hqB98bAIDXepZsscc/agft4g0J
        yKwHVLvbIwCVyRdEaWbHoqOoxUwaioKxxj1YBDsGHUHQewaxCE8fQayWen92UXvQkX9jBsewuV/A
        eRRwkSWPRhicgBC8ICYY2cWhyu4GlFPVnibtHQiRX8Fh8s/IiWLP8yA9YBSwmoGFUsIe+hGLc0Zt
        bPHVEZxg1qVWoAziAoJ3wclKXf92uZXiaUiZzqXAOHp/w59ddGxPGTVTLAWUaZP80gD8qOWP//Wp
        UqaQ5CT0gpHrDh8mPzXveWY3766kkGg/4/frrrkmVPyXBUMpaMCcsqu7KDmbS/MPAAD//wMAYCiB
        8YICAAA=
    headers:
      CF-Cache-Status:
      - DYNAMIC
      CF-RAY:
      - 854b7c415c96963f-SJC
      Cache-Control:
      - no-cache, must-revalidate
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Tue, 13 Feb 2024 07:51:22 GMT
      Server:
      - cloudflare
      Transfer-Encoding:
      - chunked
      access-control-allow-origin:
      - '*'
      alt-svc:
      - h3=":443"; ma=86400
      openai-model:
      - gpt-4-0613
      openai-organization:
      - user-z7g4wmlazxqvc5wjyaaaocfz
      openai-processing-ms:
      - '3959'
      openai-version:
      - '2020-10-01'
      strict-transport-security:
      - max-age=15724800; includeSubDomains
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '300000'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '299765'
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 46ms
      x-request-id:
      - req_6c49ee2193171b6ac31e93ff0cbc68bc
    status:
      code: 200
      message: OK
version: 1
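# Illustrative sketch only (not part of the recorded data): one way a cassette like
# this is typically replayed with vcrpy in a pytest test. The directory, cassette
# file name, and test name below are hypothetical, not taken from this file.
#
#   import vcr
#
#   replay = vcr.VCR(
#       cassette_library_dir="tests/cassettes",    # hypothetical location
#       filter_headers=["authorization"],          # keep API keys out of recordings
#   )
#
#   def test_agent_keeps_using_get_final_answer():
#       with replay.use_cassette("agent_tool_loop.yaml"):  # hypothetical file name
#           ...  # run the agent; HTTP calls are answered from the cassette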