Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-04 05:38:33 +00:00

Compare commits: 34 commits, lj/conditi...feature/re
| SHA1 |
|---|
| 92fca9bbe9 |
| 5c04c63127 |
| 1a44a34c17 |
| 363ce5e9ce |
| 10b84955ad |
| 691b094a40 |
| 68e9e54c88 |
| d0d99125c4 |
| 129000d01f |
| 47f9d026dd |
| b75b0b5552 |
| 3dd6249f1e |
| 8451113039 |
| a79b216875 |
| 52217c2f63 |
| 7edacf6e24 |
| 58558a1950 |
| 1607c85ae5 |
| a6ff342948 |
| d2eb54ebf8 |
| a41bd18599 |
| bb64c80964 |
| 2fb56f1f9f |
| 35676fe2f5 |
| 81ed6f177e |
| 4bcd1df6bb |
| 6fae56dd60 |
| 430f0e9013 |
| d7f080a978 |
| 5d18f73654 |
| 57fc079267 |
| 706f4cd74a |
| 2e3646cc96 |
| 844cc515d5 |
6  .github/workflows/tests.yml  vendored
@@ -19,13 +19,13 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
-          python-version: "3.10"
+          python-version: "3.11.9"

       - name: Install Requirements
         run: |
           set -e
           pip install poetry
           poetry lock &&
           poetry install

       - name: Run tests
-        run: poetry run pytest tests
+        run: poetry run pytest
@@ -123,7 +123,7 @@ result = my_crew.kickoff()
print(result)
```

-### Different wayt to Kicking Off a Crew
+### Different ways to Kicking Off a Crew

Once your crew is assembled, initiate the workflow with the appropriate kickoff method. CrewAI provides several methods for better control over the kickoff process: `kickoff()`, `kickoff_for_each()`, `kickoff_async()`, and `kickoff_for_each_async()`.
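As a quick, hedged illustration of those four methods (not part of the diff above), here is a minimal sketch; the agent, task, and input values are made-up placeholders, and the async variants are driven with `asyncio`:

```python
import asyncio
from crewai import Agent, Crew, Task

# Hypothetical single-agent crew, just to have something to kick off.
writer = Agent(role="Writer", goal="Write a short note on {topic}", backstory="A concise writer.")
note = Task(description="Write three bullet points about {topic}.", expected_output="Three bullets.", agent=writer)
my_crew = Crew(agents=[writer], tasks=[note])

inputs = {"topic": "AI agents"}                      # single run input (placeholder)
batch = [{"topic": "AI"}, {"topic": "robotics"}]     # one dict per run

result = my_crew.kickoff(inputs=inputs)              # single synchronous run
results = my_crew.kickoff_for_each(inputs=batch)     # one synchronous run per input dict

async def run_async():
    single = await my_crew.kickoff_async(inputs=inputs)        # single asynchronous run
    many = await my_crew.kickoff_for_each_async(inputs=batch)  # asynchronous run per input dict
    return single, many

asyncio.run(run_async())
```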
@@ -12,7 +12,7 @@ description: Leveraging memory systems in the crewAI framework to enhance agent

| Component | Description |
| :------------------- | :----------------------------------------------------------- |
| **Short-Term Memory**| Temporarily stores recent interactions and outcomes, enabling agents to recall and utilize information relevant to their current context during the current executions. |
-| **Long-Term Memory** | Preserves valuable insights and learnings from past executions, allowing agents to build and refine their knowledge over time. So Agents can remeber what they did right and wrong across multiple executions |
+| **Long-Term Memory** | Preserves valuable insights and learnings from past executions, allowing agents to build and refine their knowledge over time. So Agents can remember what they did right and wrong across multiple executions |
| **Entity Memory** | Captures and organizes information about entities (people, places, concepts) encountered during tasks, facilitating deeper understanding and relationship mapping. |
| **Contextual Memory**| Maintains the context of interactions by combining `ShortTermMemory`, `LongTermMemory`, and `EntityMemory`, aiding in the coherence and relevance of agent responses over a sequence of tasks or a conversation. |
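To ground the table, a hedged sketch of switching these memory systems on for a crew; the agent and task here are placeholders, and `memory=True` is the flag the crewAI memory docs describe for enabling them together:

```python
from crewai import Agent, Crew, Process, Task

researcher = Agent(role="Researcher", goal="Track facts about {topic}", backstory="Keeps careful notes.")
summarize = Task(description="Summarize what is known about {topic}.", expected_output="A short summary.", agent=researcher)

# memory=True wires up short-term, long-term, entity and contextual memory for the crew.
crew = Crew(
    agents=[researcher],
    tasks=[summarize],
    process=Process.sequential,
    memory=True,
)

print(crew.kickoff(inputs={"topic": "CrewAI memory"}))
```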
@@ -8,6 +8,7 @@ The training feature in CrewAI allows you to train your AI agents using the comm

During training, CrewAI utilizes techniques to optimize the performance of your agents along with human feedback. This helps the agents improve their understanding, decision-making, and problem-solving abilities.

### Training Your Crew Using the CLI
To use the training feature, follow these steps:

1. Open your terminal or command prompt.
@@ -18,7 +19,26 @@ To use the training feature, follow these steps:
crewai train -n <n_iterations>
```

Replace `<n_iterations>` with the desired number of training iterations. This determines how many times the agents will go through the training process.

### Training Your Crew Programmatically
To train your crew programmatically, use the following steps:

1. Define the number of iterations for training.
2. Specify the input parameters for the training process.
3. Execute the training command within a try-except block to handle potential errors.

```python
n_iterations = 2
inputs = {"topic": "CrewAI Training"}

try:
    YourCrewName_Crew().crew().train(n_iterations=n_iterations, inputs=inputs)

except Exception as e:
    raise Exception(f"An error occurred while training the crew: {e}")
```

!!! note "Replace `<n_iterations>` with the desired number of training iterations. This determines how many times the agents will go through the training process."

### Key Points to Note:
- **Positive Integer Requirement:** Ensure that the number of iterations (`n_iterations`) is a positive integer. The code will raise a `ValueError` if this condition is not met.
@@ -51,7 +51,7 @@ To optimize tool performance with caching, define custom caching strategies usin
@tool("Tool with Caching")
def cached_tool(argument: str) -> str:
    """Tool functionality description."""
-    return "Cachable result"
+    return "Cacheable result"

def my_cache_strategy(arguments: dict, result: str) -> bool:
    # Define custom caching logic
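The hunk cuts off before the wiring step, so as a hedged completion: the strategy is typically attached through the tool's `cache_function` attribute (as the surrounding crewAI caching docs describe), with a placeholder condition standing in for real logic:

```python
from crewai_tools import tool

@tool("Tool with Caching")
def cached_tool(argument: str) -> str:
    """Tool functionality description."""
    return "Cacheable result"

def my_cache_strategy(arguments: dict, result: str) -> bool:
    # Placeholder logic for the sketch: only cache results for short arguments.
    return len(arguments.get("argument", "")) < 100

# Attach the custom strategy; crewAI consults it before reusing a cached result.
cached_tool.cache_function = my_cache_strategy
```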
@@ -79,5 +79,4 @@ manager = Agent(

1. `allow_code_execution`: Enable or disable code execution capabilities for the agent (default is False).
2. `max_execution_time`: Set a maximum execution time (in seconds) for the agent to complete a task.
-3. `function_calling_llm`: Specify a separate language model for function calling.
-4
+3. `function_calling_llm`: Specify a separate language model for function calling.
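A hedged sketch tying those three attributes together on a single agent; the role, values, and model name are illustrative only, not taken from the diff:

```python
from crewai import Agent

coding_agent = Agent(
    role="Senior Python Developer",
    goal="Write and run small analysis scripts",
    backstory="An engineer who validates ideas with quick code experiments.",
    allow_code_execution=True,   # 1. let the agent execute code (defaults to False)
    max_execution_time=300,      # 2. cap a single task at 300 seconds
    # function_calling_llm=...,  # 3. optionally point tool/function calling at a separate model
)
```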
31  docs/how-to/Force-Tool-Ouput-as-Result.md  Normal file
@@ -0,0 +1,31 @@
---
title: Forcing Tool Output as Result
description: Learn how to force tool output as the result of an Agent's task in crewAI.
---

## Introduction
In CrewAI, you can force the output of a tool as the result of an agent's task. This feature is useful when you want to ensure that the tool output is captured and returned as the task result, and to avoid the agent modifying the output during task execution.

## Forcing Tool Output as Result
To force the tool output as the result of an agent's task, set the `result_as_answer` parameter to `True` when adding the tool to the agent, as shown in the example below. This ensures that the tool output is captured and returned as the task result, without any modifications by the agent.

Here's an example of how to force the tool output as the result of an agent's task:

```python
# ...
# Define a custom tool that returns the result as the answer
coding_agent = Agent(
    role="Data Scientist",
    goal="Produce amazing reports on AI",
    backstory="You work with data and AI",
    tools=[MyCustomTool(result_as_answer=True)],
)
# ...
```

### Workflow in Action

1. **Task Execution**: The agent executes the task using the tool provided.
2. **Tool Output**: The tool generates the output, which is captured as the task result.
3. **Agent Interaction**: The agent may reflect on and take learnings from the tool, but the output is not modified.
4. **Result Return**: The tool output is returned as the task result without any modifications.
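The example above references `MyCustomTool` without defining it; here is a minimal sketch of what such a tool might look like, assuming the `BaseTool` interface exposed by `crewai_tools` (the name, description, and return value are illustrative):

```python
from crewai_tools import BaseTool

class MyCustomTool(BaseTool):
    name: str = "Report fetcher"
    description: str = "Fetches the raw report text for a given topic."

    def _run(self, topic: str) -> str:
        # With result_as_answer=True, whatever this returns becomes the task result verbatim.
        return f"Raw report for {topic}"
```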
137  docs/how-to/Start-a-New-CrewAI-Project.md  Normal file
@@ -0,0 +1,137 @@
---
title: Starting a New CrewAI Project
description: A comprehensive guide to starting a new CrewAI project, including the latest updates and project setup methods.
---

# Starting Your CrewAI Project

Welcome to the ultimate guide for starting a new CrewAI project. This document will walk you through the steps to create, customize, and run your CrewAI project, ensuring you have everything you need to get started.

## Prerequisites

We assume you have already installed CrewAI. If not, please refer to the [installation guide](how-to/Installing-CrewAI.md) to install CrewAI and its dependencies.

## Creating a New Project

To create a new project, run the following CLI command:

```shell
$ crewai create my_project
```

This command will create a new project folder with the following structure:

```shell
my_project/
├── .gitignore
├── pyproject.toml
├── README.md
└── src/
    └── my_project/
        ├── __init__.py
        ├── main.py
        ├── crew.py
        ├── tools/
        │   ├── custom_tool.py
        │   └── __init__.py
        └── config/
            ├── agents.yaml
            └── tasks.yaml
```

You can now start developing your project by editing the files in the `src/my_project` folder. The `main.py` file is the entry point of your project, and the `crew.py` file is where you define your agents and tasks.

## Customizing Your Project

To customize your project, you can:
- Modify `src/my_project/config/agents.yaml` to define your agents.
- Modify `src/my_project/config/tasks.yaml` to define your tasks.
- Modify `src/my_project/crew.py` to add your own logic, tools, and specific arguments.
- Modify `src/my_project/main.py` to add custom inputs for your agents and tasks.
- Add your environment variables into the `.env` file.
### Example: Defining Agents and Tasks

#### agents.yaml

```yaml
researcher:
  role: >
    Job Candidate Researcher
  goal: >
    Find potential candidates for the job
  backstory: >
    You are adept at finding the right candidates by exploring various online
    resources. Your skill in identifying suitable candidates ensures the best
    match for job positions.
```

#### tasks.yaml

```yaml
research_candidates_task:
  description: >
    Conduct thorough research to find potential candidates for the specified job.
    Utilize various online resources and databases to gather a comprehensive list of potential candidates.
    Ensure that the candidates meet the job requirements provided.

    Job Requirements:
    {job_requirements}
  expected_output: >
    A list of 10 potential candidates with their contact information and brief profiles highlighting their suitability.
```
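The guide shows `agents.yaml`, `tasks.yaml`, and later `main.py`, but not `crew.py` itself. As a hedged sketch modeled on the generated project template (the class and method names here are assumptions), a crew file wiring the entries above together might look roughly like this:

```python
from crewai import Agent, Crew, Process, Task
from crewai.project import CrewBase, agent, crew, task

@CrewBase
class MyProjectCrew:
    """Hypothetical crew wiring the agents.yaml / tasks.yaml entries shown above."""

    agents_config = "config/agents.yaml"
    tasks_config = "config/tasks.yaml"

    @agent
    def researcher(self) -> Agent:
        # Pulls the 'researcher' definition from agents.yaml.
        return Agent(config=self.agents_config["researcher"], verbose=True)

    @task
    def research_candidates_task(self) -> Task:
        # Pulls the task definition from tasks.yaml and assigns the researcher to it.
        return Task(
            config=self.tasks_config["research_candidates_task"],
            agent=self.researcher(),
        )

    @crew
    def crew(self) -> Crew:
        # The decorators collect the agents and tasks declared above.
        return Crew(agents=self.agents, tasks=self.tasks, process=Process.sequential)
```

`main.py` (shown later in this guide) then only needs to instantiate this class and call `.crew().kickoff(...)` with its inputs.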
## Installing Dependencies

To install the dependencies for your project, you can use Poetry. First, navigate to your project directory:

```shell
$ cd my_project
$ poetry lock
$ poetry install
```

This will install the dependencies specified in the `pyproject.toml` file.

## Interpolating Variables

Any variable interpolated in your `agents.yaml` and `tasks.yaml` files like `{variable}` will be replaced by the value of the variable in the `main.py` file.

#### tasks.yaml

```yaml
research_task:
  description: >
    Conduct a thorough research about the customer and competitors in the context
    of {customer_domain}.
    Make sure you find any interesting and relevant information given the
    current year is 2024.
  expected_output: >
    A complete report on the customer and their customers and competitors,
    including their demographics, preferences, market positioning and audience engagement.
```

#### main.py

```python
# main.py
def run():
    inputs = {
        "customer_domain": "crewai.com"
    }
    MyProjectCrew(inputs).crew().kickoff(inputs=inputs)
```

## Running Your Project

To run your project, use the following command:

```shell
$ poetry run my_project
```

This will initialize your crew of AI agents and begin task execution as defined in your configuration in the `main.py` file.

## Deploying Your Project

The easiest way to deploy your crew is through [CrewAI+](https://www.crewai.com/crewaiplus), where you can deploy your crew in a few clicks.
@@ -48,6 +48,11 @@ Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By
  <div style="width:30%">
    <h2>How-To Guides</h2>
    <ul>
      <li>
        <a href="./how-to/Start-a-New-CrewAI-Project">
          Starting Your crewAI Project
        </a>
      </li>
      <li>
        <a href="./how-to/Installing-CrewAI">
          Installing crewAI
@@ -88,6 +93,11 @@ Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By
          Coding Agents
        </a>
      </li>
      <li>
        <a href="./how-to/Force-Tool-Ouput-as-Result">
          Forcing Tool Output as Result
        </a>
      </li>
      <li>
        <a href="./how-to/Human-Input-on-Execution">
          Human Input on Execution
@@ -4,7 +4,7 @@
We are still working on improving tools, so there might be unexpected behavior or changes in the future.

## Description
-The GithubSearchTool is a Read, Append, and Generate (RAG) tool specifically designed for conducting semantic searches within GitHub repositories. Utilizing advanced semantic search capabilities, it sifts through code, pull requests, issues, and repositories, making it an essential tool for developers, researchers, or anyone in need of precise information from GitHub.
+The GithubSearchTool is a Retrieval-Augmented Generation (RAG) tool specifically designed for conducting semantic searches within GitHub repositories. Utilizing advanced semantic search capabilities, it sifts through code, pull requests, issues, and repositories, making it an essential tool for developers, researchers, or anyone in need of precise information from GitHub.

## Installation
To use the GithubSearchTool, first ensure the crewai_tools package is installed in your Python environment:
@@ -4,7 +4,7 @@
The MDXSearchTool is in continuous development. Features may be added or removed, and functionality could change unpredictably as we refine the tool.

## Description
-The MDX Search Tool is a component of the `crewai_tools` package aimed at facilitating advanced market data extraction. This tool is invaluable for researchers and analysts seeking quick access to market insights, especially within the AI sector. It simplifies the task of acquiring, interpreting, and organizing market data by interfacing with various data sources.
+The MDX Search Tool is a component of the `crewai_tools` package aimed at facilitating advanced markdown language extraction. It enables users to effectively search and extract relevant information from MD files using query-based searches. This tool is invaluable for data analysis, information management, and research tasks, streamlining the process of finding specific information within large document collections.

## Installation
Before using the MDX Search Tool, ensure the `crewai_tools` package is installed. If it is not, you can install it with the following command:

@@ -59,4 +59,4 @@ tool = MDXSearchTool(
    ),
)
)
```
```
@@ -31,7 +31,7 @@ tool = TXTSearchTool(txt='path/to/text/file.txt')
```

## Arguments
-- `txt` (str): **Optinal**. The path to the text file you want to search. This argument is only required if the tool was not initialized with a specific text file; otherwise, the search will be conducted within the initially provided text file.
+- `txt` (str): **Optional**. The path to the text file you want to search. This argument is only required if the tool was not initialized with a specific text file; otherwise, the search will be conducted within the initially provided text file.

## Custom model and embeddings
@@ -131,6 +131,7 @@ nav:
    - Using LangChain Tools: 'core-concepts/Using-LangChain-Tools.md'
    - Using LlamaIndex Tools: 'core-concepts/Using-LlamaIndex-Tools.md'
  - How to Guides:
    - Starting Your crewAI Project: 'how-to/Start-a-New-CrewAI-Project.md'
    - Installing CrewAI: 'how-to/Installing-CrewAI.md'
    - Getting Started: 'how-to/Creating-a-Crew-and-kick-it-off.md'
    - Create Custom Tools: 'how-to/Create-Custom-Tools.md'
@@ -140,6 +141,7 @@ nav:
    - Connecting to any LLM: 'how-to/LLM-Connections.md'
    - Customizing Agents: 'how-to/Customizing-Agents.md'
    - Coding Agents: 'how-to/Coding-Agents.md'
    - Forcing Tool Output as Result: 'how-to/Force-Tool-Ouput-as-Result.md'
    - Human Input on Execution: 'how-to/Human-Input-on-Execution.md'
    - Kickoff a Crew Asynchronously: 'how-to/Kickoff-async.md'
    - Kickoff a Crew for a List: 'how-to/Kickoff-for-each.md'
575  poetry.lock  generated
@@ -1,5 +1,25 @@
|
||||
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "agentops"
|
||||
version = "0.1.12"
|
||||
description = "Python SDK for developing AI agent evals and observability"
|
||||
optional = true
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "agentops-0.1.12-py3-none-any.whl", hash = "sha256:b4b47c990638b74810cc1c38624ada162094b46e3fdd63883642a16bc5258386"},
|
||||
{file = "agentops-0.1.12.tar.gz", hash = "sha256:c4f762482fb240fc3503907f52498f2d8d9e4f80236ee4a12bf039317a85fcd7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
packaging = "23.2"
|
||||
psutil = "5.9.8"
|
||||
requests = "2.31.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["pytest (==7.4.0)", "requests-mock (==1.11.0)"]
|
||||
langchain = ["langchain (>=1.19,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.9.5"
|
||||
@@ -323,17 +343,17 @@ lxml = ["lxml"]
|
||||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.34.137"
|
||||
version = "1.34.140"
|
||||
description = "The AWS SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "boto3-1.34.137-py3-none-any.whl", hash = "sha256:7cb697d67fd138ceebc6f789919ae370c092a50c6b0ccc4ef483027935502eab"},
|
||||
{file = "boto3-1.34.137.tar.gz", hash = "sha256:0b21b84db4619b3711a6f643d465a5a25e81231ee43615c55a20ff6b89c6cc3c"},
|
||||
{file = "boto3-1.34.140-py3-none-any.whl", hash = "sha256:23ca8d8f7a30c3bbd989808056b5fc5d68ff5121c02c722c6167b6b1bb7f8726"},
|
||||
{file = "boto3-1.34.140.tar.gz", hash = "sha256:578bbd5e356005719b6b610d03edff7ea1b0824d078afe62d3fb8bea72f83a87"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.34.137,<1.35.0"
|
||||
botocore = ">=1.34.140,<1.35.0"
|
||||
jmespath = ">=0.7.1,<2.0.0"
|
||||
s3transfer = ">=0.10.0,<0.11.0"
|
||||
|
||||
@@ -342,13 +362,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
|
||||
|
||||
[[package]]
|
||||
name = "botocore"
|
||||
version = "1.34.137"
|
||||
version = "1.34.140"
|
||||
description = "Low-level, data-driven core of boto 3."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "botocore-1.34.137-py3-none-any.whl", hash = "sha256:a980fa4adec4bfa23fff70a3512622e9412c69c791898a52cafc2458b0be6040"},
|
||||
{file = "botocore-1.34.137.tar.gz", hash = "sha256:e29c8e9bfda0b20a1997792968e85868bfce42fefad9730f633a81adcff3f2ef"},
|
||||
{file = "botocore-1.34.140-py3-none-any.whl", hash = "sha256:43940d3a67d946ba3301631ba4078476a75f1015d4fb0fb0272d0b754b2cf9de"},
|
||||
{file = "botocore-1.34.140.tar.gz", hash = "sha256:86302b2226c743b9eec7915a4c6cfaffd338ae03989cd9ee181078ef39d1ab39"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -438,13 +458,13 @@ test = ["flake8", "isort", "pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2024.6.2"
|
||||
version = "2024.7.4"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
|
||||
{file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
|
||||
{file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
|
||||
{file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -727,13 +747,13 @@ all = ["pycocotools (==2.0.6)"]
|
||||
|
||||
[[package]]
|
||||
name = "clarifai-grpc"
|
||||
version = "10.5.4"
|
||||
version = "10.6.1"
|
||||
description = "Clarifai gRPC API Client"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "clarifai_grpc-10.5.4-py3-none-any.whl", hash = "sha256:ae4c4d8985fdd2bf326cec27ee834571e44d0e989fb12686dd681f9b553ae218"},
|
||||
{file = "clarifai_grpc-10.5.4.tar.gz", hash = "sha256:c67ce0dde186e8bab0d42a9923d28ddb4a05017b826c8e52ac7a86ec6df5f12a"},
|
||||
{file = "clarifai_grpc-10.6.1-py3-none-any.whl", hash = "sha256:7f07c262f46042995b11af10cdd552718c4487e955db1b3f1253fcb0c2ab1ce1"},
|
||||
{file = "clarifai_grpc-10.6.1.tar.gz", hash = "sha256:f692e3d6a051a1228ca371c3a9dc705cc9a61334eecc454d056f7af0b6f4dbad"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -820,13 +840,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "crewai-tools"
|
||||
version = "0.4.5"
|
||||
version = "0.4.8"
|
||||
description = "Set of tools for the crewAI framework"
|
||||
optional = false
|
||||
python-versions = "<=3.13,>=3.10"
|
||||
files = [
|
||||
{file = "crewai_tools-0.4.5-py3-none-any.whl", hash = "sha256:d970a152a69d039eb23150755d4dc9e7c6ef9176b19a80348142619518e39b17"},
|
||||
{file = "crewai_tools-0.4.5.tar.gz", hash = "sha256:1d7763f20afd95c6be70d31f9f0e9334cb42be127c17feddce368907077a6543"},
|
||||
{file = "crewai_tools-0.4.8-py3-none-any.whl", hash = "sha256:628b08515ee0e06c751da1dd66b0cff70c9b2644775891c8f59883cb5debfef4"},
|
||||
{file = "crewai_tools-0.4.8.tar.gz", hash = "sha256:ae190bd187f980163523c86ee7e1eb2ed78896f935d6caff98908dd7ab6c982b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -834,9 +854,9 @@ beautifulsoup4 = ">=4.12.3,<5.0.0"
|
||||
chromadb = ">=0.4.22,<0.5.0"
|
||||
docker = ">=7.1.0,<8.0.0"
|
||||
docx2txt = ">=0.8,<0.9"
|
||||
embedchain = ">=0.1.113,<0.2.0"
|
||||
embedchain = ">=0.1.114,<0.2.0"
|
||||
lancedb = ">=0.5.4,<0.6.0"
|
||||
langchain = ">=0.1.4,<0.2.0"
|
||||
langchain = ">0.2,<=0.3"
|
||||
openai = ">=1.12.0,<2.0.0"
|
||||
pydantic = ">=2.6.1,<3.0.0"
|
||||
pyright = ">=1.1.350,<2.0.0"
|
||||
@@ -1034,13 +1054,13 @@ idna = ">=2.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "embedchain"
|
||||
version = "0.1.113"
|
||||
version = "0.1.116"
|
||||
description = "Simplest open source retrieval (RAG) framework"
|
||||
optional = false
|
||||
python-versions = "<=3.13,>=3.9"
|
||||
files = [
|
||||
{file = "embedchain-0.1.113-py3-none-any.whl", hash = "sha256:f37b029d8f8509a5db99d1579168ab2ba7d5841c280289f6a2ae702601caf96f"},
|
||||
{file = "embedchain-0.1.113.tar.gz", hash = "sha256:5477012d37912a0e89758263b1a8db4699b7d0dedd7f18ccc89f3381d6b9173d"},
|
||||
{file = "embedchain-0.1.116-py3-none-any.whl", hash = "sha256:388835d047f9ff4542ebf50e3fa633ef596db262cbe506195ee4976b91a49172"},
|
||||
{file = "embedchain-0.1.116.tar.gz", hash = "sha256:3e4d6418df2e749c2bd3cd3153c3857cbecd7227afe40b87d5ac3df629c394b2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1048,11 +1068,14 @@ alembic = ">=1.13.1,<2.0.0"
|
||||
beautifulsoup4 = ">=4.12.2,<5.0.0"
|
||||
chromadb = ">=0.4.24,<0.5.0"
|
||||
clarifai = ">=10.0.1,<11.0.0"
|
||||
cohere = ">=5.3,<6.0"
|
||||
google-cloud-aiplatform = ">=1.26.1,<2.0.0"
|
||||
gptcache = ">=0.1.43,<0.2.0"
|
||||
langchain = ">=0.1.4,<0.2.0"
|
||||
langchain = ">0.2,<=0.3"
|
||||
langchain-cohere = ">=0.1.4,<0.2.0"
|
||||
langchain-community = ">=0.2.6,<0.3.0"
|
||||
langchain-openai = ">=0.1.7,<0.2.0"
|
||||
memzero = ">=0.0.7,<0.0.8"
|
||||
openai = ">=1.1.1"
|
||||
posthog = ">=3.0.2,<4.0.0"
|
||||
pypdf = ">=4.0.1,<5.0.0"
|
||||
@@ -1065,7 +1088,6 @@ tiktoken = ">=0.7.0,<0.8.0"
|
||||
|
||||
[package.extras]
|
||||
aws-bedrock = ["boto3 (>=1.34.20,<2.0.0)"]
|
||||
cohere = ["cohere (>=5.3,<6.0)"]
|
||||
dataloaders = ["docx2txt (>=0.8,<0.9)", "duckduckgo-search (>=6.1.5,<7.0.0)", "pytube (>=15.0.0,<16.0.0)", "sentence-transformers (>=2.2.2,<3.0.0)", "youtube-transcript-api (>=0.6.1,<0.7.0)"]
|
||||
discord = ["discord (>=2.3.2,<3.0.0)"]
|
||||
dropbox = ["dropbox (>=11.36.2,<12.0.0)"]
|
||||
@@ -1078,7 +1100,7 @@ huggingface-hub = ["huggingface_hub (>=0.17.3,<0.18.0)"]
|
||||
lancedb = ["lancedb (>=0.6.2,<0.7.0)"]
|
||||
llama2 = ["replicate (>=0.15.4,<0.16.0)"]
|
||||
milvus = ["pymilvus (==2.4.3)"]
|
||||
mistralai = ["langchain-mistralai (>=0.0.3,<0.0.4)"]
|
||||
mistralai = ["langchain-mistralai (>=0.1.9,<0.2.0)"]
|
||||
modal = ["modal (>=0.56.4329,<0.57.0)"]
|
||||
mysql = ["mysql-connector-python (>=8.1.0,<9.0.0)"]
|
||||
opensearch = ["opensearch-py (==2.3.1)"]
|
||||
@@ -1089,7 +1111,7 @@ qdrant = ["qdrant-client (>=1.6.3,<2.0.0)"]
|
||||
rss-feed = ["feedparser (>=6.0.10,<7.0.0)", "listparser (>=0.19,<0.20)", "newspaper3k (>=0.2.8,<0.3.0)"]
|
||||
slack = ["flask (>=2.3.3,<3.0.0)", "slack-sdk (==3.21.3)"]
|
||||
together = ["together (>=0.2.8,<0.3.0)"]
|
||||
vertexai = ["langchain-google-vertexai (>=0.0.5,<0.0.6)"]
|
||||
vertexai = ["langchain-google-vertexai (>=1.0.6,<2.0.0)"]
|
||||
weaviate = ["weaviate-client (>=3.24.1,<4.0.0)"]
|
||||
whatsapp = ["flask (>=2.3.3,<3.0.0)", "twilio (>=8.5.0,<9.0.0)"]
|
||||
youtube = ["youtube-transcript-api (>=0.6.1,<0.7.0)", "yt_dlp (>=2023.11.14,<2024.0.0)"]
|
||||
@@ -1423,13 +1445,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
|
||||
|
||||
[[package]]
|
||||
name = "google-cloud-aiplatform"
|
||||
version = "1.57.0"
|
||||
version = "1.58.0"
|
||||
description = "Vertex AI API client library"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "google-cloud-aiplatform-1.57.0.tar.gz", hash = "sha256:113905f100cb0a9ad744a2445a7675f92f28600233ba499614aa704d11a809b7"},
|
||||
{file = "google_cloud_aiplatform-1.57.0-py2.py3-none-any.whl", hash = "sha256:ca5391a56e0cc8f4ed39a2beb7be02f51936ff04fd5304775a72a86c345d0e47"},
|
||||
{file = "google-cloud-aiplatform-1.58.0.tar.gz", hash = "sha256:7a05aceac4a6c7eaa26e684e9f202b829cc7e57f82bffe7281684275a553fcad"},
|
||||
{file = "google_cloud_aiplatform-1.58.0-py2.py3-none-any.whl", hash = "sha256:21f1320860f4916183ec939fdf2ff3fc1d7fdde97fe5795974257ab21f9458ec"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1450,7 +1472,7 @@ autologging = ["mlflow (>=1.27.0,<=2.1.1)"]
|
||||
cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
||||
datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"]
|
||||
endpoint = ["requests (>=2.28.1)"]
|
||||
full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
||||
full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
||||
langchain = ["langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "tenacity (<=8.3)"]
|
||||
langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "langchain (>=0.1.16,<0.3)", "langchain-core (<0.2)", "langchain-google-vertexai (<2)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "tenacity (<=8.3)"]
|
||||
lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"]
|
||||
@@ -1459,12 +1481,12 @@ pipelines = ["pyyaml (>=5.3.1,<7)"]
|
||||
prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"]
|
||||
preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"]
|
||||
private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"]
|
||||
rapid-evaluation = ["nest-asyncio (>=1.0.0,<1.6.0)", "pandas (>=1.0.0,<2.2.0)"]
|
||||
rapid-evaluation = ["pandas (>=1.0.0,<2.2.0)", "tqdm (>=4.23.0)"]
|
||||
ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "setuptools (<70.0.0)"]
|
||||
ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (==2.9.3)", "scikit-learn", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
|
||||
reasoningengine = ["cloudpickle (>=3.0,<4.0)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)"]
|
||||
tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
|
||||
testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
|
||||
testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
|
||||
tokenization = ["sentencepiece (>=0.2.0)"]
|
||||
vizier = ["google-vizier (>=0.1.6)"]
|
||||
xai = ["tensorflow (>=2.3.0,<3.0.0dev)"]
|
||||
@@ -2030,13 +2052,13 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve
|
||||
|
||||
[[package]]
|
||||
name = "identify"
|
||||
version = "2.5.36"
|
||||
version = "2.6.0"
|
||||
description = "File identification library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"},
|
||||
{file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"},
|
||||
{file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"},
|
||||
{file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -2357,156 +2379,140 @@ tests = ["aiohttp", "duckdb", "pandas (>=1.4)", "polars (>=0.19)", "pytest", "py
|
||||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.1.20"
|
||||
version = "0.2.6"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain-0.1.20-py3-none-any.whl", hash = "sha256:09991999fbd6c3421a12db3c7d1f52d55601fc41d9b2a3ef51aab2e0e9c38da9"},
|
||||
{file = "langchain-0.1.20.tar.gz", hash = "sha256:f35c95eed8c8375e02dce95a34f2fd4856a4c98269d6dc34547a23dba5beab7e"},
|
||||
{file = "langchain-0.2.6-py3-none-any.whl", hash = "sha256:f86e8a7afd3e56f8eb5ba47f01dd00144fb9fc2f1db9873bd197347be2857aa4"},
|
||||
{file = "langchain-0.2.6.tar.gz", hash = "sha256:867f6add370c1e3911b0e87d3dd0e36aec1e8f513bf06131340fe8f151d89dc5"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aiohttp = ">=3.8.3,<4.0.0"
|
||||
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
|
||||
dataclasses-json = ">=0.5.7,<0.7"
|
||||
langchain-community = ">=0.0.38,<0.1"
|
||||
langchain-core = ">=0.1.52,<0.2.0"
|
||||
langchain-text-splitters = ">=0.0.1,<0.1"
|
||||
langchain-core = ">=0.2.10,<0.3.0"
|
||||
langchain-text-splitters = ">=0.2.0,<0.3.0"
|
||||
langsmith = ">=0.1.17,<0.2.0"
|
||||
numpy = ">=1,<2"
|
||||
numpy = [
|
||||
{version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""},
|
||||
{version = ">=1,<2", markers = "python_version < \"3.12\""},
|
||||
]
|
||||
pydantic = ">=1,<3"
|
||||
PyYAML = ">=5.3"
|
||||
requests = ">=2,<3"
|
||||
SQLAlchemy = ">=1.4,<3"
|
||||
tenacity = ">=8.1.0,<9.0.0"
|
||||
|
||||
[package.extras]
|
||||
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"]
|
||||
clarifai = ["clarifai (>=9.1.0)"]
|
||||
cli = ["typer (>=0.9.0,<0.10.0)"]
|
||||
cohere = ["cohere (>=4,<6)"]
|
||||
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
|
||||
embeddings = ["sentence-transformers (>=2,<3)"]
|
||||
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
|
||||
javascript = ["esprima (>=4.0.1,<5.0.0)"]
|
||||
llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
|
||||
openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"]
|
||||
qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
|
||||
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
|
||||
tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-cohere"
|
||||
version = "0.1.5"
|
||||
version = "0.1.8"
|
||||
description = "An integration package connecting Cohere and LangChain"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_cohere-0.1.5-py3-none-any.whl", hash = "sha256:f07bd53fadbebf744b8de1eebf977353f340f2010156821623a0c6247032ab9b"},
|
||||
{file = "langchain_cohere-0.1.5.tar.gz", hash = "sha256:d0be4e76079a74c4259fe4db2bab535d690efe0efac5e9e2fbf486476c0a85c8"},
|
||||
{file = "langchain_cohere-0.1.8-py3-none-any.whl", hash = "sha256:d3ef73d5050513ff3ca0f07c8f3f73b7773eec182312aae92138d3a0ad33e631"},
|
||||
{file = "langchain_cohere-0.1.8.tar.gz", hash = "sha256:edbeca8d041186d2831b495d9a392a0a94d15b0e2c98863e0a0cd001fc888842"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cohere = ">=5.5,<6.0"
|
||||
langchain-core = ">=0.1.42,<0.3"
|
||||
cohere = ">=5.5.6,<6.0"
|
||||
langchain-core = ">=0.2.0,<0.3"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-community"
|
||||
version = "0.0.38"
|
||||
version = "0.2.6"
|
||||
description = "Community contributed LangChain integrations."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_community-0.0.38-py3-none-any.whl", hash = "sha256:ecb48660a70a08c90229be46b0cc5f6bc9f38f2833ee44c57dfab9bf3a2c121a"},
|
||||
{file = "langchain_community-0.0.38.tar.gz", hash = "sha256:127fc4b75bc67b62fe827c66c02e715a730fef8fe69bd2023d466bab06b5810d"},
|
||||
{file = "langchain_community-0.2.6-py3-none-any.whl", hash = "sha256:758cc800acfe5dd396bf8ba1b57c4792639ead0eab48ed0367f0732ec6ee1f68"},
|
||||
{file = "langchain_community-0.2.6.tar.gz", hash = "sha256:40ce09a50ed798aa651ddb34c8978200fa8589b9813c7a28ce8af027bbf249f0"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aiohttp = ">=3.8.3,<4.0.0"
|
||||
dataclasses-json = ">=0.5.7,<0.7"
|
||||
langchain-core = ">=0.1.52,<0.2.0"
|
||||
langchain = ">=0.2.6,<0.3.0"
|
||||
langchain-core = ">=0.2.10,<0.3.0"
|
||||
langsmith = ">=0.1.0,<0.2.0"
|
||||
numpy = ">=1,<2"
|
||||
numpy = [
|
||||
{version = ">=1.26.0,<2.0.0", markers = "python_version >= \"3.12\""},
|
||||
{version = ">=1,<2", markers = "python_version < \"3.12\""},
|
||||
]
|
||||
PyYAML = ">=5.3"
|
||||
requests = ">=2,<3"
|
||||
SQLAlchemy = ">=1.4,<3"
|
||||
tenacity = ">=8.1.0,<9.0.0"
|
||||
|
||||
[package.extras]
|
||||
cli = ["typer (>=0.9.0,<0.10.0)"]
|
||||
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "azure-identity (>=1.15.0,<2.0.0)", "azure-search-documents (==11.4.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.6,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "httpx-sse (>=0.4.0,<0.5.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "oracledb (>=2.2.0,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pyjwt (>=2.8.0,<3.0.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
|
||||
tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.1.52"
|
||||
version = "0.2.11"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_core-0.1.52-py3-none-any.whl", hash = "sha256:62566749c92e8a1181c255c788548dc16dbc319d896cd6b9c95dc17af9b2a6db"},
|
||||
{file = "langchain_core-0.1.52.tar.gz", hash = "sha256:084c3fc452f5a6966c28ab3ec5dbc8b8d26fc3f63378073928f4e29d90b6393f"},
|
||||
{file = "langchain_core-0.2.11-py3-none-any.whl", hash = "sha256:c7ca4dc4d88e3c69fd7916c95a7027c2b1a11c2db5a51141c3ceb8afac212208"},
|
||||
{file = "langchain_core-0.2.11.tar.gz", hash = "sha256:7a4661b50604eeb20c3373fbfd8a4f1b74482a6ab4e0f9df11e96821ead8ef0c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
jsonpatch = ">=1.33,<2.0"
|
||||
langsmith = ">=0.1.0,<0.2.0"
|
||||
packaging = ">=23.2,<24.0"
|
||||
pydantic = ">=1,<3"
|
||||
langsmith = ">=0.1.75,<0.2.0"
|
||||
packaging = ">=23.2,<25"
|
||||
pydantic = [
|
||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||
{version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
|
||||
]
|
||||
PyYAML = ">=5.3"
|
||||
tenacity = ">=8.1.0,<9.0.0"
|
||||
|
||||
[package.extras]
|
||||
extended-testing = ["jinja2 (>=3,<4)"]
|
||||
tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-openai"
|
||||
version = "0.1.7"
|
||||
version = "0.1.14"
|
||||
description = "An integration package connecting OpenAI and LangChain"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_openai-0.1.7-py3-none-any.whl", hash = "sha256:39c3cb22bb739900ae8294d4d9939a6138c0ca7ad11198e57038eb14c08d04ec"},
|
||||
{file = "langchain_openai-0.1.7.tar.gz", hash = "sha256:fd7e1c33ba8e2cab4b2154f3a2fd4a0d9cc6518b41cf49bb87255f9f732a4896"},
|
||||
{file = "langchain_openai-0.1.14-py3-none-any.whl", hash = "sha256:fcd34cc5b5713798908a5828d364b4426e3b1afccbd564d344e5477acb86634a"},
|
||||
{file = "langchain_openai-0.1.14.tar.gz", hash = "sha256:1f13d6041e8bedddf6eb47ccad7416e05af38fa169324f7f1bdf4f385780f8d8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
langchain-core = ">=0.1.46,<0.3"
|
||||
openai = ">=1.24.0,<2.0.0"
|
||||
langchain-core = ">=0.2.2,<0.3"
|
||||
openai = ">=1.32.0,<2.0.0"
|
||||
tiktoken = ">=0.7,<1"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-text-splitters"
|
||||
version = "0.0.2"
|
||||
version = "0.2.2"
|
||||
description = "LangChain text splitting utilities"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langchain_text_splitters-0.0.2-py3-none-any.whl", hash = "sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d"},
|
||||
{file = "langchain_text_splitters-0.0.2.tar.gz", hash = "sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1"},
|
||||
{file = "langchain_text_splitters-0.2.2-py3-none-any.whl", hash = "sha256:1c80d4b11b55e2995f02d2a326c0323ee1eeff24507329bb22924e420c782dff"},
|
||||
{file = "langchain_text_splitters-0.2.2.tar.gz", hash = "sha256:a1e45de10919fa6fb080ef0525deab56557e9552083600455cb9fa4238076140"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
langchain-core = ">=0.1.28,<0.3"
|
||||
|
||||
[package.extras]
|
||||
extended-testing = ["beautifulsoup4 (>=4.12.3,<5.0.0)", "lxml (>=4.9.3,<6.0)"]
|
||||
langchain-core = ">=0.2.10,<0.3.0"
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.1.82"
|
||||
version = "0.1.84"
|
||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langsmith-0.1.82-py3-none-any.whl", hash = "sha256:9b3653e7d316036b0c60bf0bc3e280662d660f485a4ebd8e5c9d84f9831ae79c"},
|
||||
{file = "langsmith-0.1.82.tar.gz", hash = "sha256:c02e2bbc488c10c13b52c69d271eb40bd38da078d37b6ae7ae04a18bd48140be"},
|
||||
{file = "langsmith-0.1.84-py3-none-any.whl", hash = "sha256:01f3c6390dba26c583bac8dd0e551ce3d0509c7f55cad714db0b5c8d36e4c7ff"},
|
||||
{file = "langsmith-0.1.84.tar.gz", hash = "sha256:5220c0439838b9a5bd320fd3686be505c5083dcee22d2452006c23891153bea1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
orjson = ">=3.9.14,<4.0.0"
|
||||
pydantic = [
|
||||
{version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
|
||||
{version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""},
|
||||
{version = ">=1,<3", markers = "python_full_version < \"3.12.4\""},
|
||||
]
|
||||
requests = ">=2,<3"
|
||||
|
||||
@@ -2667,6 +2673,22 @@ files = [
|
||||
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memzero"
|
||||
version = "0.0.7"
|
||||
description = "Long-term memory for AI Agents"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
files = [
|
||||
{file = "memzero-0.0.7-py3-none-any.whl", hash = "sha256:65f6da88d46263dbc05621fcd01bd09616d0e7f082d55ed9899dc2152491ffd2"},
|
||||
{file = "memzero-0.0.7.tar.gz", hash = "sha256:0c1f413d8ee0ade955fe9f8b8f5aff2cf58bc94869537aca62139db3d9f50725"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
httpx = ">=0.27.0,<0.28.0"
|
||||
posthog = ">=3.5.0,<4.0.0"
|
||||
pydantic = ">=2.7.3,<3.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "mergedeep"
|
||||
version = "1.3.4"
|
||||
@@ -2742,13 +2764,13 @@ pyyaml = ">=5.1"
|
||||
|
||||
[[package]]
|
||||
name = "mkdocs-material"
|
||||
version = "9.5.27"
|
||||
version = "9.5.28"
|
||||
description = "Documentation that simply works"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "mkdocs_material-9.5.27-py3-none-any.whl", hash = "sha256:af8cc263fafa98bb79e9e15a8c966204abf15164987569bd1175fd66a7705182"},
|
||||
{file = "mkdocs_material-9.5.27.tar.gz", hash = "sha256:a7d4a35f6d4a62b0c43a0cfe7e987da0980c13587b5bc3c26e690ad494427ec0"},
|
||||
{file = "mkdocs_material-9.5.28-py3-none-any.whl", hash = "sha256:ff48b11b2a9f705dd210409ec3b418ab443dd36d96915bcba45a41f10ea27bfd"},
|
||||
{file = "mkdocs_material-9.5.28.tar.gz", hash = "sha256:9cba305283ad1600e3d0a67abe72d7a058b54793b47be39930911a588fe0336b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3214,13 +3236,13 @@ sympy = "*"
|
||||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "1.35.7"
|
||||
version = "1.35.10"
|
||||
description = "The official Python library for the openai API"
|
||||
optional = false
|
||||
python-versions = ">=3.7.1"
|
||||
files = [
|
||||
{file = "openai-1.35.7-py3-none-any.whl", hash = "sha256:3d1e0b0aac9b0db69a972d36dc7efa7563f8e8d65550b27a48f2a0c2ec207e80"},
|
||||
{file = "openai-1.35.7.tar.gz", hash = "sha256:009bfa1504c9c7ef64d87be55936d142325656bbc6d98c68b669d6472e4beb09"},
|
||||
{file = "openai-1.35.10-py3-none-any.whl", hash = "sha256:962cb5c23224b5cbd16078308dabab97a08b0a5ad736a4fdb3dc2ffc44ac974f"},
|
||||
{file = "openai-1.35.10.tar.gz", hash = "sha256:85966949f4f960f3e4b239a659f9fd64d3a97ecc43c44dc0a044b5c7f11cccc6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3419,57 +3441,62 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "orjson"
|
||||
version = "3.10.5"
|
||||
version = "3.10.6"
|
||||
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"},
|
||||
{file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"},
|
||||
{file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"},
|
||||
{file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"},
|
||||
{file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"},
|
||||
{file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"},
|
||||
{file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"},
|
||||
{file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"},
|
||||
{file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"},
|
||||
{file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"},
|
||||
{file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"},
|
||||
{file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"},
|
||||
{file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"},
|
||||
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"},
|
||||
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"},
|
||||
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"},
|
||||
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"},
|
||||
{file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"},
|
||||
{file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"},
|
||||
{file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"},
|
||||
{file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"},
|
||||
{file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"},
|
||||
{file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"},
|
||||
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"},
|
||||
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"},
|
||||
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"},
|
||||
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"},
|
||||
{file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"},
|
||||
{file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"},
|
||||
{file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"},
|
||||
{file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"},
|
||||
{file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"},
|
||||
{file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"},
|
||||
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"},
|
||||
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"},
|
||||
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"},
|
||||
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"},
|
||||
{file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"},
|
||||
{file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"},
|
||||
{file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"},
|
||||
{file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"},
|
||||
{file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"},
|
||||
{file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"},
|
||||
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"},
|
||||
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"},
|
||||
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"},
|
||||
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"},
|
||||
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"},
|
||||
{file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"},
|
||||
{file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"},
|
||||
{file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"},
|
||||
{file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"},
|
||||
{file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"},
|
||||
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"},
|
||||
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"},
|
||||
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"},
|
||||
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"},
|
||||
{file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"},
|
||||
{file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"},
|
||||
{file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"},
|
||||
{file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"},
|
||||
{file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"},
|
||||
{file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"},
|
||||
]

[[package]]
@@ -3777,6 +3804,34 @@ files = [
{file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"},
]

[[package]]
name = "psutil"
version = "5.9.8"
description = "Cross-platform lib for process and system monitoring in Python."
optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
{file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"},
|
||||
{file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"},
|
||||
{file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"},
|
||||
{file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"},
|
||||
{file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"},
|
||||
{file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"},
|
||||
{file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"},
|
||||
{file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"},
|
||||
{file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"},
|
||||
{file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"},
|
||||
{file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"},
|
||||
{file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"},
|
||||
{file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"},
|
||||
{file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"},
|
||||
{file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"},
|
||||
{file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"},
|
||||
]

[package.extras]
test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]

[[package]]
name = "pulsar-client"
version = "3.5.0"
@@ -3921,21 +3976,21 @@ files = [

[[package]]
name = "pydantic"
version = "2.8.0"
version = "2.8.2"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic-2.8.0-py3-none-any.whl", hash = "sha256:ead4f3a1e92386a734ca1411cb25d94147cf8778ed5be6b56749047676d6364e"},
{file = "pydantic-2.8.0.tar.gz", hash = "sha256:d970ffb9d030b710795878940bd0489842c638e7252fc4a19c3ae2f7da4d6141"},
{file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"},
{file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"},
]

[package.dependencies]
annotated-types = ">=0.4.0"
pydantic-core = "2.20.0"
pydantic-core = "2.20.1"
typing-extensions = [
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
]

[package.extras]
@@ -3943,99 +3998,100 @@ email = ["email-validator (>=2.0.0)"]

[[package]]
name = "pydantic-core"
version = "2.20.0"
version = "2.20.1"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic_core-2.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:e9dcd7fb34f7bfb239b5fa420033642fff0ad676b765559c3737b91f664d4fa9"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:649a764d9b0da29816889424697b2a3746963ad36d3e0968784ceed6e40c6355"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7701df088d0b05f3460f7ba15aec81ac8b0fb5690367dfd072a6c38cf5b7fdb5"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab760f17c3e792225cdaef31ca23c0aea45c14ce80d8eff62503f86a5ab76bff"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb1ad5b4d73cde784cf64580166568074f5ccd2548d765e690546cff3d80937d"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b81ec2efc04fc1dbf400647d4357d64fb25543bae38d2d19787d69360aad21c9"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4a9732a5cad764ba37f3aa873dccb41b584f69c347a57323eda0930deec8e10"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6dc85b9e10cc21d9c1055f15684f76fa4facadddcb6cd63abab702eb93c98943"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:21d9f7e24f63fdc7118e6cc49defaab8c1d27570782f7e5256169d77498cf7c7"},
|
||||
{file = "pydantic_core-2.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8b315685832ab9287e6124b5d74fc12dda31e6421d7f6b08525791452844bc2d"},
|
||||
{file = "pydantic_core-2.20.0-cp310-none-win32.whl", hash = "sha256:c3dc8ec8b87c7ad534c75b8855168a08a7036fdb9deeeed5705ba9410721c84d"},
|
||||
{file = "pydantic_core-2.20.0-cp310-none-win_amd64.whl", hash = "sha256:85770b4b37bb36ef93a6122601795231225641003e0318d23c6233c59b424279"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:58e251bb5a5998f7226dc90b0b753eeffa720bd66664eba51927c2a7a2d5f32c"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:78d584caac52c24240ef9ecd75de64c760bbd0e20dbf6973631815e3ef16ef8b"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5084ec9721f82bef5ff7c4d1ee65e1626783abb585f8c0993833490b63fe1792"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d0f52684868db7c218437d260e14d37948b094493f2646f22d3dda7229bbe3f"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1def125d59a87fe451212a72ab9ed34c118ff771e5473fef4f2f95d8ede26d75"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b34480fd6778ab356abf1e9086a4ced95002a1e195e8d2fd182b0def9d944d11"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d42669d319db366cb567c3b444f43caa7ffb779bf9530692c6f244fc635a41eb"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:53b06aea7a48919a254b32107647be9128c066aaa6ee6d5d08222325f25ef175"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1f038156b696a1c39d763b2080aeefa87ddb4162c10aa9fabfefffc3dd8180fa"},
|
||||
{file = "pydantic_core-2.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3f0f3a4a23717280a5ee3ac4fb1f81d6fde604c9ec5100f7f6f987716bb8c137"},
|
||||
{file = "pydantic_core-2.20.0-cp311-none-win32.whl", hash = "sha256:316fe7c3fec017affd916a0c83d6f1ec697cbbbdf1124769fa73328e7907cc2e"},
|
||||
{file = "pydantic_core-2.20.0-cp311-none-win_amd64.whl", hash = "sha256:2d06a7fa437f93782e3f32d739c3ec189f82fca74336c08255f9e20cea1ed378"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d6f8c49657f3eb7720ed4c9b26624063da14937fc94d1812f1e04a2204db3e17"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad1bd2f377f56fec11d5cfd0977c30061cd19f4fa199bf138b200ec0d5e27eeb"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed741183719a5271f97d93bbcc45ed64619fa38068aaa6e90027d1d17e30dc8d"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d82e5ed3a05f2dcb89c6ead2fd0dbff7ac09bc02c1b4028ece2d3a3854d049ce"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2ba34a099576234671f2e4274e5bc6813b22e28778c216d680eabd0db3f7dad"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:879ae6bb08a063b3e1b7ac8c860096d8fd6b48dd9b2690b7f2738b8c835e744b"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0eefc7633a04c0694340aad91fbfd1986fe1a1e0c63a22793ba40a18fcbdc8"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73deadd6fd8a23e2f40b412b3ac617a112143c8989a4fe265050fd91ba5c0608"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:35681445dc85446fb105943d81ae7569aa7e89de80d1ca4ac3229e05c311bdb1"},
|
||||
{file = "pydantic_core-2.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0f6dd3612a3b9f91f2e63924ea18a4476656c6d01843ca20a4c09e00422195af"},
|
||||
{file = "pydantic_core-2.20.0-cp312-none-win32.whl", hash = "sha256:7e37b6bb6e90c2b8412b06373c6978d9d81e7199a40e24a6ef480e8acdeaf918"},
|
||||
{file = "pydantic_core-2.20.0-cp312-none-win_amd64.whl", hash = "sha256:7d4df13d1c55e84351fab51383520b84f490740a9f1fec905362aa64590b7a5d"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:d43e7ab3b65e4dc35a7612cfff7b0fd62dce5bc11a7cd198310b57f39847fd6c"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b6a24d7b5893392f2b8e3b7a0031ae3b14c6c1942a4615f0d8794fdeeefb08b"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2f13c3e955a087c3ec86f97661d9f72a76e221281b2262956af381224cfc243"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72432fd6e868c8d0a6849869e004b8bcae233a3c56383954c228316694920b38"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d70a8ff2d4953afb4cbe6211f17268ad29c0b47e73d3372f40e7775904bc28fc"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e49524917b8d3c2f42cd0d2df61178e08e50f5f029f9af1f402b3ee64574392"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4f0f71653b1c1bad0350bc0b4cc057ab87b438ff18fa6392533811ebd01439c"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:16197e6f4fdecb9892ed2436e507e44f0a1aa2cff3b9306d1c879ea2f9200997"},
|
||||
{file = "pydantic_core-2.20.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:763602504bf640b3ded3bba3f8ed8a1cc2fc6a87b8d55c1c5689f428c49c947e"},
|
||||
{file = "pydantic_core-2.20.0-cp313-none-win32.whl", hash = "sha256:a3f243f318bd9523277fa123b3163f4c005a3e8619d4b867064de02f287a564d"},
|
||||
{file = "pydantic_core-2.20.0-cp313-none-win_amd64.whl", hash = "sha256:03aceaf6a5adaad3bec2233edc5a7905026553916615888e53154807e404545c"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d6f2d8b8da1f03f577243b07bbdd3412eee3d37d1f2fd71d1513cbc76a8c1239"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a272785a226869416c6b3c1b7e450506152d3844207331f02f27173562c917e0"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efbb412d55a4ffe73963fed95c09ccb83647ec63b711c4b3752be10a56f0090b"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e4f46189d8740561b43655263a41aac75ff0388febcb2c9ec4f1b60a0ec12f3"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3df115f4a3c8c5e4d5acf067d399c6466d7e604fc9ee9acbe6f0c88a0c3cf"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a340d2bdebe819d08f605e9705ed551c3feb97e4fd71822d7147c1e4bdbb9508"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:616b9c2f882393d422ba11b40e72382fe975e806ad693095e9a3b67c59ea6150"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25c46bb2ff6084859bbcfdf4f1a63004b98e88b6d04053e8bf324e115398e9e7"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:23425eccef8f2c342f78d3a238c824623836c6c874d93c726673dbf7e56c78c0"},
|
||||
{file = "pydantic_core-2.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:52527e8f223ba29608d999d65b204676398009725007c9336651c2ec2d93cffc"},
|
||||
{file = "pydantic_core-2.20.0-cp38-none-win32.whl", hash = "sha256:1c3c5b7f70dd19a6845292b0775295ea81c61540f68671ae06bfe4421b3222c2"},
|
||||
{file = "pydantic_core-2.20.0-cp38-none-win_amd64.whl", hash = "sha256:8093473d7b9e908af1cef30025609afc8f5fd2a16ff07f97440fd911421e4432"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ee7785938e407418795e4399b2bf5b5f3cf6cf728077a7f26973220d58d885cf"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e75794883d635071cf6b4ed2a5d7a1e50672ab7a051454c76446ef1ebcdcc91"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:344e352c96e53b4f56b53d24728217c69399b8129c16789f70236083c6ceb2ac"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:978d4123ad1e605daf1ba5e01d4f235bcf7b6e340ef07e7122e8e9cfe3eb61ab"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c05eaf6c863781eb834ab41f5963604ab92855822a2062897958089d1335dad"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc7e43b4a528ffca8c9151b6a2ca34482c2fdc05e6aa24a84b7f475c896fc51d"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658287a29351166510ebbe0a75c373600cc4367a3d9337b964dada8d38bcc0f4"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1dacf660d6de692fe351e8c806e7efccf09ee5184865893afbe8e59be4920b4a"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3e147fc6e27b9a487320d78515c5f29798b539179f7777018cedf51b7749e4f4"},
|
||||
{file = "pydantic_core-2.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c867230d715a3dd1d962c8d9bef0d3168994ed663e21bf748b6e3a529a129aab"},
|
||||
{file = "pydantic_core-2.20.0-cp39-none-win32.whl", hash = "sha256:22b813baf0dbf612752d8143a2dbf8e33ccb850656b7850e009bad2e101fc377"},
|
||||
{file = "pydantic_core-2.20.0-cp39-none-win_amd64.whl", hash = "sha256:3a7235b46c1bbe201f09b6f0f5e6c36b16bad3d0532a10493742f91fbdc8035f"},
|
||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cafde15a6f7feaec2f570646e2ffc5b73412295d29134a29067e70740ec6ee20"},
|
||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2aec8eeea0b08fd6bc2213d8e86811a07491849fd3d79955b62d83e32fa2ad5f"},
|
||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:840200827984f1c4e114008abc2f5ede362d6e11ed0b5931681884dd41852ff1"},
|
||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ea1d8b7df522e5ced34993c423c3bf3735c53df8b2a15688a2f03a7d678800"},
|
||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5b8376a867047bf08910573deb95d3c8dfb976eb014ee24f3b5a61ccc5bee1b"},
|
||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d08264b4460326cefacc179fc1411304d5af388a79910832835e6f641512358b"},
|
||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7a3639011c2e8a9628466f616ed7fb413f30032b891898e10895a0a8b5857d6c"},
|
||||
{file = "pydantic_core-2.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:05e83ce2f7eba29e627dd8066aa6c4c0269b2d4f889c0eba157233a353053cea"},
|
||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:603a843fea76a595c8f661cd4da4d2281dff1e38c4a836a928eac1a2f8fe88e4"},
|
||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac76f30d5d3454f4c28826d891fe74d25121a346c69523c9810ebba43f3b1cec"},
|
||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e3b1d4b1b3f6082849f9b28427ef147a5b46a6132a3dbaf9ca1baa40c88609"},
|
||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2761f71faed820e25ec62eacba670d1b5c2709bb131a19fcdbfbb09884593e5a"},
|
||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a0586cddbf4380e24569b8a05f234e7305717cc8323f50114dfb2051fcbce2a3"},
|
||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b8c46a8cf53e849eea7090f331ae2202cd0f1ceb090b00f5902c423bd1e11805"},
|
||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b4a085bd04af7245e140d1b95619fe8abb445a3d7fdf219b3f80c940853268ef"},
|
||||
{file = "pydantic_core-2.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:116b326ac82c8b315e7348390f6d30bcfe6e688a7d3f1de50ff7bcc2042a23c2"},
|
||||
{file = "pydantic_core-2.20.0.tar.gz", hash = "sha256:366be8e64e0cb63d87cf79b4e1765c0703dd6313c729b22e7b9e378db6b96877"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"},
|
||||
{file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"},
|
||||
{file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"},
|
||||
{file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"},
|
||||
{file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"},
|
||||
{file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"},
|
||||
{file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"},
|
||||
{file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"},
|
||||
{file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"},
|
||||
{file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"},
|
||||
{file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"},
|
||||
{file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"},
|
||||
{file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"},
|
||||
{file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"},
|
||||
]

[package.dependencies]
@@ -4163,13 +4219,13 @@ files = [

[[package]]
name = "pyright"
version = "1.1.369"
version = "1.1.370"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
{file = "pyright-1.1.369-py3-none-any.whl", hash = "sha256:06d5167a8d7be62523ced0265c5d2f1e022e110caf57a25d92f50fb2d07bcda0"},
{file = "pyright-1.1.369.tar.gz", hash = "sha256:ad290710072d021e213b98cc7a2f90ae3a48609ef5b978f749346d1a47eb9af8"},
{file = "pyright-1.1.370-py3-none-any.whl", hash = "sha256:fc721601e480a69989775bfc210534a6ca0110ebd0c065244a8d3a151294fc61"},
{file = "pyright-1.1.370.tar.gz", hash = "sha256:d0d559d506fc41e3297f721aaa05a1b9f06beda5acc9ac64ca371ce94c28f960"},
]

[package.dependencies]
@@ -4588,13 +4644,13 @@ files = [

[[package]]
name = "requests"
version = "2.32.3"
version = "2.31.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.7"
files = [
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]

[package.dependencies]
@@ -4994,13 +5050,13 @@ widechars = ["wcwidth"]

[[package]]
name = "tenacity"
version = "8.4.2"
version = "8.5.0"
description = "Retry code until it succeeds"
optional = false
python-versions = ">=3.8"
files = [
{file = "tenacity-8.4.2-py3-none-any.whl", hash = "sha256:9e6f7cf7da729125c7437222f8a522279751cdfbe6b67bfe64f75d3a348661b2"},
{file = "tenacity-8.4.2.tar.gz", hash = "sha256:cd80a53a79336edba8489e767f729e4f391c896956b57140b5d7511a64bbd3ef"},
{file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"},
{file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"},
]

[package.extras]
@@ -5227,13 +5283,13 @@ telegram = ["requests"]

[[package]]
name = "trio"
version = "0.25.1"
version = "0.26.0"
description = "A friendly Python library for async concurrency and I/O"
optional = false
python-versions = ">=3.8"
files = [
{file = "trio-0.25.1-py3-none-any.whl", hash = "sha256:e42617ba091e7b2e50c899052e83a3c403101841de925187f61e7b7eaebdf3fb"},
{file = "trio-0.25.1.tar.gz", hash = "sha256:9f5314f014ea3af489e77b001861c535005c3858d38ec46b6b071ebfa339d7fb"},
{file = "trio-0.26.0-py3-none-any.whl", hash = "sha256:bb9c1b259591af941fccfbabbdc65bc7ed764bd2db76428454c894cd5e3d2032"},
{file = "trio-0.26.0.tar.gz", hash = "sha256:67c5ec3265dd4abc7b1d1ab9ca4fe4c25b896f9c93dac73713778adab487f9c4"},
]

[package.dependencies]
@@ -6028,9 +6084,10 @@ doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linke
test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]

[extras]
agentops = ["agentops"]
tools = ["crewai-tools"]

[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<=3.13"
content-hash = "d4ea0d71723ecc2bad629c387dd786b6a96c553b1e3e298516fdfcd2059d1019"
content-hash = "0dbf6f6e2e841fb3eec4ff87ea5d6b430f29702118fee91307983c6b2581e59e"

@@ -1,6 +1,6 @@
[tool.poetry]
name = "crewai"
version = "0.35.8"
version = "0.36.0"
description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks."
authors = ["Joao Moura <joao@crewai.com>"]
readme = "README.md"
@@ -14,20 +14,20 @@ Repository = "https://github.com/joaomdmoura/crewai"
[tool.poetry.dependencies]
python = ">=3.10,<=3.13"
pydantic = "^2.4.2"
langchain = ">=0.1.4,<0.2.0"
langchain = ">0.2,<=0.3"
openai = "^1.13.3"
opentelemetry-api = "^1.22.0"
opentelemetry-sdk = "^1.22.0"
opentelemetry-exporter-otlp-proto-http = "^1.22.0"
instructor = "1.3.3"
regex = "^2023.12.25"
crewai-tools = { version = "^0.4.6", optional = true }
crewai-tools = { version = "^0.4.8", optional = true }
click = "^8.1.7"
python-dotenv = "^1.0.0"
appdirs = "^1.4.4"
jsonref = "^1.1.0"
agentops = { version = "^0.1.9", optional = true }
embedchain = "^0.1.113"
embedchain = "^0.1.114"

[tool.poetry.extras]
tools = ["crewai-tools"]
@@ -45,7 +45,7 @@ mkdocs-material = { extras = ["imaging"], version = "^9.5.7" }
mkdocs-material-extensions = "^1.3.1"
pillow = "^10.2.0"
cairosvg = "^2.7.1"
crewai-tools = "^0.4.6"
crewai-tools = "^0.4.8"

[tool.poetry.group.test.dependencies]
pytest = "^8.0.0"

@@ -20,7 +20,7 @@ from crewai.utilities.training_handler import CrewTrainingHandler

agentops = None
try:
import agentops
import agentops # type: ignore # Name "agentops" already defined on line 21
from agentops import track_agent
except ImportError:

@@ -60,8 +60,8 @@ class Agent(BaseAgent):
default=None,
description="Maximum execution time for an agent to execute a task",
)
agent_ops_agent_name: str = None
agent_ops_agent_id: str = None
agent_ops_agent_name: str = None # type: ignore # Incompatible types in assignment (expression has type "None", variable has type "str")
agent_ops_agent_id: str = None # type: ignore # Incompatible types in assignment (expression has type "None", variable has type "str")
cache_handler: InstanceOf[CacheHandler] = Field(
default=None, description="An instance of the CacheHandler class."
)
@@ -90,6 +90,9 @@ class Agent(BaseAgent):
response_template: Optional[str] = Field(
default=None, description="Response format for the agent."
)
tools_results: Optional[List[Any]] = Field(
default=[], description="Results of the tools used by the agent."
)
allow_code_execution: Optional[bool] = Field(
default=False, description="Enable code execution for the agent."
)
@@ -116,7 +119,8 @@ class Agent(BaseAgent):
self.llm.callbacks.append(token_handler)

if agentops and not any(
isinstance(handler, agentops.LangchainCallbackHandler) for handler in self.llm.callbacks
isinstance(handler, agentops.LangchainCallbackHandler)
for handler in self.llm.callbacks
):
agentops.stop_instrumenting()
self.llm.callbacks.append(agentops.LangchainCallbackHandler())
@@ -144,8 +148,7 @@ class Agent(BaseAgent):
Output of the agent
"""
if self.tools_handler:
# type: ignore # Incompatible types in assignment (expression has type "dict[Never, Never]", variable has type "ToolCalling")
self.tools_handler.last_used_tool = {}
self.tools_handler.last_used_tool = {} # type: ignore # Incompatible types in assignment (expression has type "dict[Never, Never]", variable has type "ToolCalling")

task_prompt = task.prompt()

@@ -165,8 +168,8 @@ class Agent(BaseAgent):
task_prompt += self.i18n.slice("memory").format(memory=memory)

tools = tools or self.tools
# type: ignore # Argument 1 to "_parse_tools" of "Agent" has incompatible type "list[Any] | None"; expected "list[Any]"
parsed_tools = self._parse_tools(tools or [])

parsed_tools = self._parse_tools(tools or []) # type: ignore # Argument 1 to "_parse_tools" of "Agent" has incompatible type "list[Any] | None"; expected "list[Any]"
self.create_agent_executor(tools=tools)
self.agent_executor.tools = parsed_tools
self.agent_executor.task = task
@@ -188,6 +191,14 @@ class Agent(BaseAgent):
)["output"]
if self.max_rpm:
self._rpm_controller.stop_rpm_counter()

# If there was any tool in self.tools_results that had result_as_answer
# set to True, return the results of the last tool that had
# result_as_answer set to True
for tool_result in self.tools_results: # type: ignore # Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable)
if tool_result.get("result_as_answer", False):
result = tool_result["result"]

return result

def format_log_to_str(
@@ -288,7 +299,7 @@ class Agent(BaseAgent):
def get_output_converter(self, llm, text, model, instructions):
return Converter(llm=llm, text=text, model=model, instructions=instructions)

def _parse_tools(self, tools: List[Any]) -> List[LangChainTool]:
def _parse_tools(self, tools: List[Any]) -> List[LangChainTool]: # type: ignore # Function "langchain_core.tools.tool" is not valid as a type
"""Parse tools to be used for the task."""
tools_list = []
try:

@@ -191,7 +191,7 @@ class BaseAgent(ABC, BaseModel):
"""Get the converter class for the agent to create json/pydantic outputs."""
pass

def copy(self: T) -> T:
def copy(self: T) -> T: # type: ignore # Signature of "copy" incompatible with supertype "BaseModel"
"""Create a deep copy of the Agent."""
exclude = {
"id",

@@ -1,6 +1,8 @@
from abc import ABC, abstractmethod
from typing import List, Optional, Union

from pydantic import BaseModel, Field

from crewai.agents.agent_builder.base_agent import BaseAgent
from crewai.task import Task
from crewai.utilities import I18N
@@ -53,7 +55,7 @@ class BaseAgentTools(BaseModel, ABC):
# {"task": "....", "coworker": "...."}
agent_name = agent.casefold().replace('"', "").replace("\n", "")

agent = [
agent = [ # type: ignore # Incompatible types in assignment (expression has type "list[BaseAgent]", variable has type "str | None")
available_agent
for available_agent in self.agents
if available_agent.role.casefold().replace("\n", "") == agent_name
@@ -73,9 +75,9 @@ class BaseAgentTools(BaseModel, ABC):
)

agent = agent[0]
task = Task(
task = Task( # type: ignore # Incompatible types in assignment (expression has type "Task", variable has type "str")
description=task,
agent=agent,
expected_output="Your best answer to your coworker asking you this, accounting for the context shared.",
)
return agent.execute_task(task, context)
return agent.execute_task(task, context) # type: ignore # "str" has no attribute "execute_task"

@@ -1,7 +1,6 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
from pydantic import BaseModel, Field, PrivateAttr
|
||||
|
||||
|
||||
@@ -27,8 +26,8 @@ class OutputConverter(BaseModel, ABC):
|
||||
llm: Any = Field(description="The language model to be used to convert the text.")
|
||||
model: Any = Field(description="The model to be used to convert the text.")
|
||||
instructions: str = Field(description="Conversion instructions to the LLM.")
|
||||
max_attemps: Optional[int] = Field(
|
||||
description="Max number of attemps to try to get the output formated.",
|
||||
max_attempts: Optional[int] = Field(
|
||||
description="Max number of attempts to try to get the output formatted.",
|
||||
default=3,
|
||||
)
|
||||
|
||||
@@ -42,7 +41,7 @@ class OutputConverter(BaseModel, ABC):
|
||||
"""Convert text to json."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _is_gpt(self, llm):
|
||||
@abstractmethod # type: ignore # Name "_is_gpt" already defined on line 25
|
||||
def _is_gpt(self, llm): # type: ignore # Name "_is_gpt" already defined on line 25
|
||||
"""Return if llm provided is of gpt from openai."""
|
||||
pass
|
||||
|
||||
@@ -15,19 +15,18 @@ from langchain.agents.agent import ExceptionTool
|
||||
from langchain.callbacks.manager import CallbackManagerForChainRun
|
||||
from langchain_core.agents import AgentAction, AgentFinish, AgentStep
|
||||
from langchain_core.exceptions import OutputParserException
|
||||
|
||||
from langchain_core.tools import BaseTool
|
||||
from langchain_core.utils.input import get_color_mapping
|
||||
from pydantic import InstanceOf
|
||||
|
||||
from crewai.agents.agent_builder.base_agent_executor_mixin import (
|
||||
CrewAgentExecutorMixin,
|
||||
)
|
||||
|
||||
from crewai.agents.tools_handler import ToolsHandler
|
||||
from crewai.tools.tool_usage import ToolUsage, ToolUsageErrorException
|
||||
from crewai.utilities import I18N
|
||||
from crewai.utilities.constants import TRAINING_DATA_FILE
|
||||
from crewai.utilities.training_handler import CrewTrainingHandler
|
||||
from crewai.utilities import I18N
|
||||
|
||||
|
||||
class CrewAgentExecutor(AgentExecutor, CrewAgentExecutorMixin):
|
||||
@@ -46,7 +45,7 @@ class CrewAgentExecutor(AgentExecutor, CrewAgentExecutorMixin):
|
||||
tools_handler: Optional[InstanceOf[ToolsHandler]] = None
|
||||
max_iterations: Optional[int] = 15
|
||||
have_forced_answer: bool = False
|
||||
force_answer_max_iterations: Optional[int] = None
|
||||
force_answer_max_iterations: Optional[int] = None # type: ignore # Incompatible types in assignment (expression has type "int | None", base class "CrewAgentExecutorMixin" defined the type as "int")
|
||||
step_callback: Optional[Any] = None
|
||||
system_template: Optional[str] = None
|
||||
prompt_template: Optional[str] = None
|
||||
@@ -243,6 +242,7 @@ class CrewAgentExecutor(AgentExecutor, CrewAgentExecutorMixin):
|
||||
tools_names=self.tools_names,
|
||||
function_calling_llm=self.function_calling_llm,
|
||||
task=self.task,
|
||||
agent=self.crew_agent,
|
||||
action=agent_action,
|
||||
)
|
||||
tool_calling = tool_usage.parse(agent_action.log)
|
||||
|
||||
@@ -12,4 +12,4 @@ reporting_task:
|
||||
Make sure the report is detailed and contains any and all relevant information.
|
||||
expected_output: >
|
||||
A fully fledge reports with the mains topics, each with a full section of information.
|
||||
Formated as markdown with out '```'
|
||||
Formatted as markdown without '```'
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import asyncio
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from concurrent.futures import Future
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||
|
||||
@@ -31,7 +32,10 @@ from crewai.tasks.task_output import TaskOutput
|
||||
from crewai.telemetry import Telemetry
|
||||
from crewai.tools.agent_tools import AgentTools
|
||||
from crewai.utilities import I18N, FileHandler, Logger, RPMController
|
||||
from crewai.utilities.constants import TRAINED_AGENTS_DATA_FILE, TRAINING_DATA_FILE
|
||||
from crewai.utilities.crew_json_encoder import CrewJSONEncoder
|
||||
from crewai.utilities.evaluators.task_evaluator import TaskEvaluator
|
||||
from crewai.utilities.file_handler import TaskOutputJsonHandler
|
||||
from crewai.utilities.formatter import aggregate_raw_outputs_from_task_outputs
|
||||
from crewai.utilities.training_handler import CrewTrainingHandler
|
||||
|
||||
@@ -70,6 +74,7 @@ class Crew(BaseModel):
|
||||
_rpm_controller: RPMController = PrivateAttr()
|
||||
_logger: Logger = PrivateAttr()
|
||||
_file_handler: FileHandler = PrivateAttr()
|
||||
_task_output_handler: TaskOutputJsonHandler = PrivateAttr()
|
||||
_cache_handler: InstanceOf[CacheHandler] = PrivateAttr(default=CacheHandler())
|
||||
_short_term_memory: Optional[InstanceOf[ShortTermMemory]] = PrivateAttr()
|
||||
_long_term_memory: Optional[InstanceOf[LongTermMemory]] = PrivateAttr()
|
||||
@@ -131,6 +136,16 @@ class Crew(BaseModel):
|
||||
default=False,
|
||||
description="output_log_file",
|
||||
)
|
||||
task_execution_output_json_files: Optional[List[str]] = Field(
|
||||
default=None,
|
||||
description="List of file paths for task execution JSON files.",
|
||||
)
|
||||
execution_logs: List[Dict[str, Any]] = Field(
|
||||
default=[],
|
||||
description="List of execution logs for tasks",
|
||||
)
|
||||
|
||||
_log_file: str = PrivateAttr(default="crew_tasks_output.json")
|
||||
|
||||
@field_validator("id", mode="before")
|
||||
@classmethod
|
||||
@@ -163,6 +178,7 @@ class Crew(BaseModel):
|
||||
self._logger = Logger(self.verbose)
|
||||
if self.output_log_file:
|
||||
self._file_handler = FileHandler(self.output_log_file)
|
||||
self._task_output_handler = TaskOutputJsonHandler(self._log_file)
|
||||
self._rpm_controller = RPMController(max_rpm=self.max_rpm, logger=self._logger)
|
||||
self._telemetry = Telemetry()
|
||||
self._telemetry.set_tracer()
|
||||
@@ -223,6 +239,33 @@ class Crew(BaseModel):
|
||||
agent.set_rpm_controller(self._rpm_controller)
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def validate_tasks(self):
|
||||
if self.process == Process.sequential:
|
||||
for task in self.tasks:
|
||||
if task.agent is None:
|
||||
raise PydanticCustomError(
|
||||
"missing_agent_in_task",
|
||||
f"Sequential process error: Agent is missing in the task with the following description: {task.description}", # type: ignore # Argument of type "str" cannot be assigned to parameter "message_template" of type "LiteralString"
|
||||
{},
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
@model_validator(mode="after")
|
||||
def check_tasks_in_hierarchical_process_not_async(self):
|
||||
"""Validates that the tasks in hierarchical process are not flagged with async_execution."""
|
||||
if self.process == Process.hierarchical:
|
||||
for task in self.tasks:
|
||||
if task.async_execution:
|
||||
raise PydanticCustomError(
|
||||
"async_execution_in_hierarchical_process",
|
||||
"Hierarchical process error: Tasks cannot be flagged with async_execution.",
|
||||
{},
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
def _setup_from_config(self):
|
||||
assert self.config is not None, "Config should not be None."
|
||||
|
||||
@@ -261,6 +304,9 @@ class Crew(BaseModel):
|
||||
for agent in self.agents:
|
||||
agent.allow_delegation = False
|
||||
|
||||
CrewTrainingHandler(TRAINING_DATA_FILE).initialize_file()
|
||||
CrewTrainingHandler(TRAINED_AGENTS_DATA_FILE).initialize_file()
|
||||
|
||||
def train(self, n_iterations: int, inputs: Optional[Dict[str, Any]] = {}) -> None:
|
||||
"""Trains the crew for a given number of iterations."""
|
||||
self._setup_for_training()
|
||||
@@ -269,14 +315,14 @@ class Crew(BaseModel):
|
||||
self._train_iteration = n_iteration
|
||||
self.kickoff(inputs=inputs)
|
||||
|
||||
training_data = CrewTrainingHandler("training_data.pkl").load()
|
||||
training_data = CrewTrainingHandler(TRAINING_DATA_FILE).load()
|
||||
|
||||
for agent in self.agents:
|
||||
result = TaskEvaluator(agent).evaluate_training_data(
|
||||
training_data=training_data, agent_id=str(agent.id)
|
||||
)
|
||||
|
||||
CrewTrainingHandler("trained_agents_data.pkl").save_trained_data(
|
||||
CrewTrainingHandler(TRAINED_AGENTS_DATA_FILE).save_trained_data(
|
||||
agent_id=str(agent.role), trained_data=result.model_dump()
|
||||
)
|
||||
|
||||
@@ -286,38 +332,36 @@ class Crew(BaseModel):
|
||||
) -> CrewOutput:
|
||||
"""Starts the crew to work on its assigned tasks."""
|
||||
self._execution_span = self._telemetry.crew_execution_span(self, inputs)
|
||||
self.execution_logs = []
|
||||
if inputs is not None:
|
||||
self._interpolate_inputs(inputs)
|
||||
self._interpolate_inputs(inputs)
|
||||
# self._interpolate_inputs(inputs)
|
||||
self._set_tasks_callbacks()
|
||||
|
||||
i18n = I18N(prompt_file=self.prompt_file)
|
||||
|
||||
for agent in self.agents:
|
||||
# type: ignore # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]"
|
||||
agent.i18n = i18n
|
||||
# type: ignore[attr-defined] # Argument 1 to "_interpolate_inputs" of "Crew" has incompatible type "dict[str, Any] | None"; expected "dict[str, Any]"
|
||||
agent.crew = self # type: ignore[attr-defined]
|
||||
# TODO: Create an AgentFunctionCalling protocol for future refactoring
|
||||
if not agent.function_calling_llm:
|
||||
agent.function_calling_llm = self.function_calling_llm
|
||||
if not agent.function_calling_llm: # type: ignore # "BaseAgent" has no attribute "function_calling_llm"
|
||||
agent.function_calling_llm = self.function_calling_llm # type: ignore # "BaseAgent" has no attribute "function_calling_llm"
|
||||
|
||||
if agent.allow_code_execution:
|
||||
agent.tools += agent.get_code_execution_tools()
|
||||
if agent.allow_code_execution: # type: ignore # BaseAgent" has no attribute "allow_code_execution"
|
||||
agent.tools += agent.get_code_execution_tools() # type: ignore # "BaseAgent" has no attribute "get_code_execution_tools"; maybe "get_delegation_tools"?
|
||||
|
||||
if not agent.step_callback:
|
||||
agent.step_callback = self.step_callback
|
||||
if not agent.step_callback: # type: ignore # "BaseAgent" has no attribute "step_callback"
|
||||
agent.step_callback = self.step_callback # type: ignore # "BaseAgent" has no attribute "step_callback"
|
||||
|
||||
agent.create_agent_executor()
|
||||
|
||||
metrics = []
|
||||
|
||||
if self.process == Process.sequential:
|
||||
result = self._run_sequential_process()
|
||||
result = self._run_sequential_process(inputs)
|
||||
elif self.process == Process.hierarchical:
|
||||
# type: ignore # Unpacking a string is disallowed
|
||||
result, manager_metrics = self._run_hierarchical_process()
|
||||
# type: ignore # Cannot determine type of "manager_metrics"
|
||||
result, manager_metrics = self._run_hierarchical_process() # type: ignore # Incompatible types in assignment (expression has type "str | dict[str, Any]", variable has type "str")
|
||||
metrics.append(manager_metrics)
|
||||
else:
|
||||
raise NotImplementedError(
|
||||
@@ -373,6 +417,10 @@ class Crew(BaseModel):
|
||||
asyncio.create_task(run_crew(crew_copies[i], inputs[i]))
|
||||
for i in range(len(inputs))
|
||||
]
|
||||
tasks = [
|
||||
asyncio.create_task(run_crew(crew_copies[i], inputs[i]))
|
||||
for i in range(len(inputs))
|
||||
]
|
||||
|
||||
results = await asyncio.gather(*tasks)
|
||||
|
||||
@@ -389,15 +437,68 @@ class Crew(BaseModel):
|
||||
|
||||
self.usage_metrics = total_usage_metrics
|
||||
|
||||
total_usage_metrics = {
|
||||
"total_tokens": 0,
|
||||
"prompt_tokens": 0,
|
||||
"completion_tokens": 0,
|
||||
"successful_requests": 0,
|
||||
}
|
||||
for crew in crew_copies:
|
||||
if crew.usage_metrics:
|
||||
for key in total_usage_metrics:
|
||||
total_usage_metrics[key] += crew.usage_metrics.get(key, 0)
|
||||
|
||||
self.usage_metrics = total_usage_metrics
|
||||
|
||||
return results
|
||||
|
||||
def _run_sequential_process(self) -> CrewOutput:
|
||||
def _store_execution_log(self, task, output, task_index, inputs=None):
|
||||
log = {
|
||||
"task_id": str(task.id),
|
||||
"description": task.description,
|
||||
"expected_output": task.expected_output,
|
||||
"agent_role": task.agent.role if task.agent else "None",
|
||||
"output": {
|
||||
"description": task.description,
|
||||
"summary": task.description,
|
||||
"raw_output": output.raw_output,
|
||||
"pydantic_output": output.pydantic_output,
|
||||
"json_output": output.json_output,
|
||||
"agent": task.agent.role if task.agent else "None",
|
||||
},
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"task_index": task_index,
|
||||
# "output_py": output.pydantic_output,
|
||||
"inputs": inputs,
|
||||
# "task": task.model_dump(),
|
||||
}
|
||||
self.execution_logs.append(log)
|
||||
self._task_output_handler.append(log)
|
||||
|
||||
def _run_sequential_process(
|
||||
self, inputs: Dict[str, Any] | None = None
|
||||
) -> CrewOutput:
|
||||
"""Executes tasks sequentially and returns the final output."""
|
||||
self.execution_logs = []
|
||||
task_outputs = self._execute_tasks(self.tasks, inputs=inputs)
|
||||
final_string_output = aggregate_raw_outputs_from_task_outputs(task_outputs)
|
||||
self._finish_execution(final_string_output)
|
||||
self.save_execution_logs()
|
||||
token_usage = self.calculate_usage_metrics()
|
||||
|
||||
return self._format_output(task_outputs, token_usage)
|
||||
|
||||
def _execute_tasks(
|
||||
self,
|
||||
tasks,
|
||||
start_index=0,
|
||||
is_replay=False,
|
||||
inputs: Dict[str, Any] | None = None,
|
||||
):
|
||||
task_outputs: List[TaskOutput] = []
|
||||
futures: List[Tuple[Task, Future[TaskOutput]]] = []
|
||||
|
||||
for task in self.tasks:
|
||||
if task.agent.allow_delegation: # type: ignore # Item "None" of "Agent | None" has no attribute "allow_delegation"
|
||||
for task_index, task in enumerate(tasks[start_index:], start=start_index):
|
||||
if task.agent and task.agent.allow_delegation:
|
||||
agents_for_delegation = [
|
||||
agent for agent in self.agents if agent != task.agent
|
||||
]
|
||||
@@ -405,9 +506,15 @@ class Crew(BaseModel):
|
||||
task.tools += task.agent.get_delegation_tools(agents_for_delegation)
|
||||
|
||||
role = task.agent.role if task.agent is not None else "None"
|
||||
self._logger.log("debug", f"== Working Agent: {role}", color="bold_purple")
|
||||
log_prefix = "== Replaying from" if is_replay else "=="
|
||||
log_color = "bold_blue" if is_replay else "bold_purple"
|
||||
self._logger.log(
|
||||
"info", f"== Starting Task: {task.description}", color="bold_purple"
|
||||
"debug", f"{log_prefix} Working Agent: {role}", color=log_color
|
||||
)
|
||||
self._logger.log(
|
||||
"info",
|
||||
f"{log_prefix} {'Replaying' if is_replay else 'Starting'} Task: {task.description}",
|
||||
color=log_color,
|
||||
)
|
||||
|
||||
if self.output_log_file:
|
||||
@@ -422,16 +529,10 @@ class Crew(BaseModel):
|
||||
)
|
||||
futures.append((task, future))
|
||||
else:
|
||||
# Before executing a synchronous task, wait for all async tasks to complete
|
||||
if futures:
|
||||
# Clear task_outputs before processing async tasks
|
||||
task_outputs = []
|
||||
for future_task, future in futures:
|
||||
task_output = future.result()
|
||||
task_outputs.append(task_output)
|
||||
self._process_task_result(future_task, task_output)
|
||||
|
||||
# Clear the futures list after processing all async results
|
||||
task_outputs = self._process_async_tasks(
|
||||
futures, task_index, inputs
|
||||
)
|
||||
futures.clear()
|
||||
|
||||
context = aggregate_raw_outputs_from_task_outputs(task_outputs)
|
||||
@@ -440,21 +541,12 @@ class Crew(BaseModel):
|
||||
)
|
||||
task_outputs = [task_output]
|
||||
self._process_task_result(task, task_output)
|
||||
self._store_execution_log(task, task_output, task_index, inputs)
|
||||
|
||||
if futures:
|
||||
# Clear task_outputs before processing async tasks
|
||||
task_outputs = []
|
||||
for future_task, future in futures:
|
||||
task_output = future.result()
|
||||
task_outputs.append(task_output)
|
||||
self._process_task_result(future_task, task_output)
|
||||
task_outputs = self._process_async_tasks(futures, len(tasks), inputs)
|
||||
|
||||
final_string_output = aggregate_raw_outputs_from_task_outputs(task_outputs)
|
||||
self._finish_execution(final_string_output)
|
||||
|
||||
token_usage = self.calculate_usage_metrics()
|
||||
|
||||
return self._format_output(task_outputs, token_usage)
|
||||
return task_outputs
|
||||
|
||||
def _process_task_result(self, task: Task, output: TaskOutput) -> None:
|
||||
role = task.agent.role if task.agent is not None else "None"
|
||||
@@ -462,13 +554,170 @@ class Crew(BaseModel):
|
||||
if self.output_log_file:
|
||||
self._file_handler.log(agent=role, task=output, status="completed")
|
||||
|
||||
def _process_async_tasks(
|
||||
self,
|
||||
futures: List[Tuple[Task, Future[TaskOutput]]],
|
||||
task_index: int,
|
||||
inputs: Dict[str, Any] | None = None,
|
||||
) -> List[TaskOutput]:
|
||||
task_outputs = []
|
||||
for future_task, future in futures:
|
||||
task_output = future.result()
|
||||
task_outputs.append(task_output)
|
||||
self._process_task_result(future_task, task_output)
|
||||
self._store_execution_log(future_task, task_output, task_index, inputs)
|
||||
|
||||
return task_outputs
|
||||
|
||||
def replay_from_task(self, task_id: str):
|
||||
stored_outputs = self._load_stored_outputs()
|
||||
start_index = next(
|
||||
(
|
||||
index
|
||||
for (index, d) in enumerate(stored_outputs)
|
||||
if d["task_id"] == str(task_id)
|
||||
),
|
||||
None,
|
||||
)
|
||||
if start_index is None:
|
||||
raise ValueError(f"Task with id {task_id} not found in the crew's tasks.")
|
||||
# Create a map of task ID to stored output
|
||||
stored_output_map: Dict[str, dict] = {
|
||||
log["task_id"]: log["output"] for log in stored_outputs
|
||||
}
|
||||
|
||||
        task_outputs: List[TaskOutput] = []  # will propagate the old outputs first to add context, then fill the content with the new task outputs relative to the replay start
|
||||
futures: List[Tuple[Task, Future[TaskOutput]]] = []
|
||||
context = ""
|
||||
|
||||
inputs = stored_outputs[start_index].get("inputs", {})
|
||||
if inputs is not None:
|
||||
self._interpolate_inputs(inputs)
|
||||
for task_index, task in enumerate(self.tasks):
|
||||
if task_index < start_index:
|
||||
# Use stored output for tasks before the replay point
|
||||
if task.id in stored_output_map:
|
||||
stored_output = stored_output_map[task.id]
|
||||
task_output = TaskOutput(
|
||||
description=stored_output["description"],
|
||||
raw_output=stored_output["raw_output"],
|
||||
pydantic_output=stored_output["pydantic_output"],
|
||||
json_output=stored_output["json_output"],
|
||||
agent=stored_output["agent"],
|
||||
)
|
||||
task_outputs.append(task_output)
|
||||
context += (
|
||||
f"\nTask {task_index + 1} Output:\n{task_output.raw_output}"
|
||||
)
|
||||
else:
|
||||
role = task.agent.role if task.agent is not None else "None"
|
||||
log_color = "bold_blue"
|
||||
self._logger.log(
|
||||
"debug", f"Replaying Working Agent: {role}", color=log_color
|
||||
)
|
||||
self._logger.log(
|
||||
"info",
|
||||
f"Replaying Task: {task.description}",
|
||||
color=log_color,
|
||||
)
|
||||
|
||||
if self.output_log_file:
|
||||
self._file_handler.log(
|
||||
agent=role, task=task.description, status="started"
|
||||
)
|
||||
# Execute task for replay and subsequent tasks
|
||||
if task.async_execution:
|
||||
future = task.execute_async(
|
||||
agent=task.agent, context=context, tools=task.tools
|
||||
)
|
||||
futures.append((task, future))
|
||||
else:
|
||||
if futures:
|
||||
async_outputs = self._process_async_tasks(
|
||||
futures, task_index, inputs
|
||||
)
|
||||
task_outputs.extend(async_outputs)
|
||||
for output in async_outputs:
|
||||
context += (
|
||||
f"\nTask {task_index + 1} Output:\n{output.raw_output}"
|
||||
)
|
||||
futures.clear()
|
||||
task_output = task.execute_sync(
|
||||
agent=task.agent, context=context, tools=task.tools
|
||||
)
|
||||
task_outputs.append(task_output)
|
||||
self._process_task_result(task, task_output)
|
||||
self._store_execution_log(task, task_output, task_index, inputs)
|
||||
context += (
|
||||
f"\nTask {task_index + 1} Output:\n{task_output.raw_output}"
|
||||
)
|
||||
|
||||
# Process any remaining async tasks
|
||||
if futures:
|
||||
async_outputs = self._process_async_tasks(futures, len(self.tasks), inputs)
|
||||
task_outputs.extend(async_outputs)
|
||||
# Calculate usage metrics
|
||||
token_usage = self.calculate_usage_metrics()
|
||||
|
||||
# Format and return the final output
|
||||
return self._format_output(task_outputs, token_usage)
|
||||
|
||||
def _load_stored_outputs(self) -> List[Dict]:
|
||||
try:
|
||||
with open(self._log_file, "r") as f:
|
||||
return json.load(f)
|
||||
except FileNotFoundError:
|
||||
self._logger.log(
|
||||
"warning",
|
||||
f"Log file {self._log_file} not found. Starting with empty logs.",
|
||||
)
|
||||
return []
|
||||
except json.JSONDecodeError:
|
||||
self._logger.log(
|
||||
"error",
|
||||
f"Failed to parse log file {self._log_file}. Starting with empty logs.",
|
||||
)
|
||||
return []
|
||||
|
||||
    def save_execution_logs(self, filename: str | None = None):
        """Save execution logs to a file."""
        if filename:
            self._log_file = filename
        try:
            with open(self._log_file, "w") as f:
                json.dump(self.execution_logs, f, indent=2, cls=CrewJSONEncoder)
        except Exception as e:
            self._logger.log("error", f"Failed to save execution logs: {str(e)}")

    def load_execution_logs(self, filename: str | None = None):
        """Load execution logs from a file."""
        if filename:
            self._log_file = filename
        try:
            with open(self._log_file, "r") as f:
                self.execution_logs = json.load(f)
        except FileNotFoundError:
            self._logger.log(
                "warning",
                f"Log file {self._log_file} not found. Starting with empty logs.",
            )
            self.execution_logs = []
        except json.JSONDecodeError:
            self._logger.log(
                "error",
                f"Failed to parse log file {self._log_file}. Starting with empty logs.",
            )
            self.execution_logs = []
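A hedged usage sketch of the replay feature introduced above. The crew, agents, and tasks are hypothetical placeholders; only kickoff, save_execution_logs, load_execution_logs, and replay_from_task come from this change.

# Run once, persisting per-task execution logs.
crew = Crew(agents=[researcher, writer], tasks=[research_task, report_task])
crew.kickoff()
crew.save_execution_logs("crew_tasks_output.json")

# Later: reuse stored outputs for earlier tasks and re-run from the reporting task.
crew.load_execution_logs("crew_tasks_output.json")
result = crew.replay_from_task(task_id=str(report_task.id))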
def _run_hierarchical_process(self) -> Tuple[CrewOutput, Dict[str, Any]]:
|
||||
"""Creates and assigns a manager agent to make sure the crew completes the tasks."""
|
||||
i18n = I18N(prompt_file=self.prompt_file)
|
||||
if self.manager_agent is not None:
|
||||
self.manager_agent.allow_delegation = True
|
||||
manager = self.manager_agent
|
||||
if len(manager.tools) > 0:
|
||||
if manager.tools is not None and len(manager.tools) > 0:
|
||||
raise Exception("Manager agent should not have tools")
|
||||
manager.tools = self.manager_agent.get_delegation_tools(self.agents)
|
||||
else:
|
||||
@@ -597,7 +846,7 @@ class Crew(BaseModel):
|
||||
"""
|
||||
return CrewOutput(
|
||||
output=output,
|
||||
tasks_output=[task.output for task in self.tasks if task],
|
||||
tasks_output=[task.output for task in self.tasks if task and task.output],
|
||||
token_usage=token_usage,
|
||||
)
|
||||
|
||||
@@ -608,7 +857,6 @@ class Crew(BaseModel):
|
||||
agentops.end_session(
|
||||
end_state="Success",
|
||||
end_state_reason="Finished Execution",
|
||||
is_auto_end=True,
|
||||
)
|
||||
self._telemetry.end_crew(self, final_string_output)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Any, Dict, List, Union
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
@@ -18,7 +18,7 @@ class CrewOutput(BaseModel):
|
||||
# TODO: Ask @joao what is the desired behavior here
|
||||
def result(
|
||||
self,
|
||||
) -> List[str | BaseModel | Dict[str, Any]]]:
|
||||
) -> List[str | BaseModel | Dict[str, Any]]:
|
||||
"""Return the result of the task based on the available output."""
|
||||
results = [output.result() for output in self.output]
|
||||
return results
|
||||
|
||||
@@ -15,6 +15,7 @@ from crewai.agents.agent_builder.base_agent import BaseAgent
|
||||
from crewai.tasks.task_output import TaskOutput
|
||||
from crewai.telemetry.telemetry import Telemetry
|
||||
from crewai.utilities.converter import Converter, ConverterError
|
||||
from crewai.utilities.formatter import aggregate_raw_outputs_from_task_outputs
|
||||
from crewai.utilities.i18n import I18N
|
||||
from crewai.utilities.printer import Printer
|
||||
from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser
|
||||
@@ -158,6 +159,18 @@ class Task(BaseModel):
|
||||
)
|
||||
return self
|
||||
|
||||
    def wait_for_completion(self) -> str | BaseModel:
        """Wait for asynchronous task completion and return the output."""
        assert self.async_execution, "Task is not set to be executed asynchronously."

        if self._future:
            self._future.result()  # Wait for the future to complete
            self._future = None

        assert self.output, "Task output is not set."

        return self.output.exported_output

    def execute_sync(
        self,
        agent: Optional[BaseAgent] = None,
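A small sketch of the new wait_for_completion helper. The task and agent are hypothetical, and it assumes the crew has already started the task via execute_async, which populates the internal future and, on completion, the task output.

summary_task = Task(
    description="Summarize the findings",
    expected_output="A short summary",
    agent=analyst,          # hypothetical agent
    async_execution=True,
)
# ... the crew kicks the task off asynchronously ...
summary = summary_task.wait_for_completion()  # blocks until the future resolves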
@@ -198,22 +211,22 @@ class Task(BaseModel):
|
||||
tools: Optional[List[Any]],
|
||||
) -> TaskOutput:
|
||||
"""Run the core execution logic of the task."""
|
||||
self._execution_span = self._telemetry.task_started(self)
|
||||
|
||||
agent = agent or self.agent
|
||||
if not agent:
|
||||
raise Exception(
|
||||
f"The task '{self.description}' has no agent assigned, therefore it can't be executed directly and should be executed in a Crew using a specific process that support that, like hierarchical."
|
||||
)
|
||||
|
||||
self._execution_span = self._telemetry.task_started(crew=agent.crew, task=self)
|
||||
|
||||
if self.context:
|
||||
context_list = []
|
||||
task_outputs: List[TaskOutput] = []
|
||||
for task in self.context:
|
||||
if task.async_execution and task._thread:
|
||||
task._thread.join()
|
||||
if task and task.output:
|
||||
context_list.append(task.output.raw_output)
|
||||
context = "\n".join(context_list)
|
||||
# if task.async_execution:
|
||||
# task.wait_for_completion()
|
||||
if task.output:
|
||||
task_outputs.append(task.output)
|
||||
context = aggregate_raw_outputs_from_task_outputs(task_outputs)
|
||||
|
||||
self.prompt_context = context
|
||||
tools = tools or self.tools
|
||||
@@ -289,7 +302,7 @@ class Task(BaseModel):
|
||||
copied_data = {k: v for k, v in copied_data.items() if v is not None}
|
||||
|
||||
cloned_context = (
|
||||
[task.copy() for task in self.context] if self.context else None
|
||||
[task.copy(agents) for task in self.context] if self.context else None
|
||||
)
|
||||
|
||||
def get_agent_by_role(role: str) -> Union["BaseAgent", None]:
|
||||
@@ -397,14 +410,12 @@ class Task(BaseModel):
|
||||
return isinstance(llm, ChatOpenAI) and llm.openai_api_base is None
|
||||
|
||||
def _save_file(self, result: Any) -> None:
|
||||
# type: ignore # Value of type variable "AnyOrLiteralStr" of "dirname" cannot be "str | None"
|
||||
directory = os.path.dirname(self.output_file)
|
||||
directory = os.path.dirname(self.output_file) # type: ignore # Value of type variable "AnyOrLiteralStr" of "dirname" cannot be "str | None"
|
||||
|
||||
if directory and not os.path.exists(directory):
|
||||
os.makedirs(directory)
|
||||
|
||||
# type: ignore # Argument 1 to "open" has incompatible type "str | None"; expected "int | str | bytes | PathLike[str] | PathLike[bytes]"
|
||||
with open(self.output_file, "w", encoding="utf-8") as file:
|
||||
with open(self.output_file, "w", encoding="utf-8") as file: # type: ignore # Argument 1 to "open" has incompatible type "str | None"; expected "int | str | bytes | PathLike[str] | PathLike[bytes]"
|
||||
file.write(result)
|
||||
return None
|
||||
|
||||
|
||||
@@ -156,18 +156,35 @@ class Telemetry:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def task_started(self, task: Task) -> Span | None:
|
||||
def task_started(self, crew: Crew, task: Task) -> Span | None:
|
||||
"""Records task started in a crew."""
|
||||
if self.ready:
|
||||
try:
|
||||
tracer = trace.get_tracer("crewai.telemetry")
|
||||
span = tracer.start_span("Task Execution")
|
||||
|
||||
created_span = tracer.start_span("Task Created")
|
||||
|
||||
self._add_attribute(created_span, "task_id", str(task.id))
|
||||
|
||||
if crew.share_crew:
|
||||
self._add_attribute(
|
||||
created_span, "formatted_description", task.description
|
||||
)
|
||||
self._add_attribute(
|
||||
created_span, "formatted_expected_output", task.expected_output
|
||||
)
|
||||
|
||||
created_span.set_status(Status(StatusCode.OK))
|
||||
created_span.end()
|
||||
|
||||
self._add_attribute(span, "task_id", str(task.id))
|
||||
self._add_attribute(span, "formatted_description", task.description)
|
||||
self._add_attribute(
|
||||
span, "formatted_expected_output", task.expected_output
|
||||
)
|
||||
|
||||
if crew.share_crew:
|
||||
self._add_attribute(span, "formatted_description", task.description)
|
||||
self._add_attribute(
|
||||
span, "formatted_expected_output", task.expected_output
|
||||
)
|
||||
|
||||
return span
|
||||
except Exception:
|
||||
|
||||
@@ -8,7 +8,7 @@ from pydantic.v1 import BaseModel, Field
class ToolCalling(BaseModel):
    tool_name: str = Field(..., description="The name of the tool to be called.")
    arguments: Optional[Dict[str, Any]] = Field(
        ..., description="A dictinary of arguments to be passed to the tool."
        ..., description="A dictionary of arguments to be passed to the tool."
    )


@@ -17,5 +17,5 @@ class InstructorToolCalling(PydanticBaseModel):
        ..., description="The name of the tool to be called."
    )
    arguments: Optional[Dict[str, Any]] = PydanticField(
        ..., description="A dictinary of arguments to be passed to the tool."
        ..., description="A dictionary of arguments to be passed to the tool."
    )
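For reference, a minimal sketch of the parsed structure ToolUsage works with; the field values are illustrative.

from crewai.tools.tool_calling import ToolCalling

calling = ToolCalling(
    tool_name="Get Greetings",  # must match an available tool's name
    arguments={},               # keyword arguments forwarded to the tool's _run
)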
@@ -11,11 +11,10 @@ from crewai.telemetry import Telemetry
|
||||
from crewai.tools.tool_calling import InstructorToolCalling, ToolCalling
|
||||
from crewai.utilities import I18N, Converter, ConverterError, Printer
|
||||
|
||||
agentops = None
|
||||
try:
|
||||
import agentops
|
||||
except ImportError:
|
||||
pass
|
||||
agentops = None
|
||||
|
||||
OPENAI_BIGGER_MODELS = ["gpt-4"]
|
||||
|
||||
@@ -51,6 +50,7 @@ class ToolUsage:
|
||||
tools_names: str,
|
||||
task: Any,
|
||||
function_calling_llm: Any,
|
||||
agent: Any,
|
||||
action: Any,
|
||||
) -> None:
|
||||
self._i18n: I18N = I18N()
|
||||
@@ -59,6 +59,7 @@ class ToolUsage:
|
||||
self._run_attempts: int = 1
|
||||
self._max_parsing_attempts: int = 3
|
||||
self._remember_format_after_usages: int = 3
|
||||
self.agent = agent
|
||||
self.tools_description = tools_description
|
||||
self.tools_names = tools_names
|
||||
self.tools_handler = tools_handler
|
||||
@@ -97,7 +98,7 @@ class ToolUsage:
|
||||
self.task.increment_tools_errors()
|
||||
self._printer.print(content=f"\n\n{error}\n", color="red")
|
||||
return error
|
||||
return f"{self._use(tool_string=tool_string, tool=tool, calling=calling)}" # type: ignore # BUG?: "_use" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
return f"{self._use(tool_string=tool_string, tool=tool, calling=calling)}" # type: ignore # BUG?: "_use" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
|
||||
def _use(
|
||||
self,
|
||||
@@ -105,8 +106,8 @@ class ToolUsage:
|
||||
tool: BaseTool,
|
||||
calling: Union[ToolCalling, InstructorToolCalling],
|
||||
) -> str: # TODO: Fix this return type
|
||||
tool_event = agentops.ToolEvent(name=calling.tool_name) if agentops else None
|
||||
if self._check_tool_repeated_usage(calling=calling): # type: ignore # _check_tool_repeated_usage of "ToolUsage" does not return a value (it only ever returns None)
|
||||
tool_event = agentops.ToolEvent(name=calling.tool_name) if agentops else None
|
||||
if self._check_tool_repeated_usage(calling=calling): # type: ignore # _check_tool_repeated_usage of "ToolUsage" does not return a value (it only ever returns None)
|
||||
try:
|
||||
result = self._i18n.errors("task_repeated_usage").format(
|
||||
tool_names=self.tools_names
|
||||
@@ -117,20 +118,24 @@ class ToolUsage:
|
||||
tool_name=tool.name,
|
||||
attempts=self._run_attempts,
|
||||
)
|
||||
result = self._format_result(result=result) # type: ignore # "_format_result" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
return result # type: ignore # Fix the reutrn type of this function
|
||||
result = self._format_result(result=result) # type: ignore # "_format_result" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
return result # type: ignore # Fix the return type of this function
|
||||
|
||||
except Exception:
|
||||
self.task.increment_tools_errors()
|
||||
|
||||
result = None # type: ignore # Incompatible types in assignment (expression has type "None", variable has type "str")
|
||||
result = None # type: ignore # Incompatible types in assignment (expression has type "None", variable has type "str")
|
||||
|
||||
if self.tools_handler.cache:
|
||||
result = self.tools_handler.cache.read( # type: ignore # Incompatible types in assignment (expression has type "str | None", variable has type "str")
|
||||
tool=calling.tool_name, input=calling.arguments
|
||||
)
|
||||
|
||||
if result is None: #! finecwg: if not result --> if result is None
|
||||
original_tool = next(
|
||||
(ot for ot in self.original_tools if ot.name == tool.name), None
|
||||
)
|
||||
|
||||
if result is None: #! finecwg: if not result --> if result is None
|
||||
try:
|
||||
if calling.tool_name in [
|
||||
"Delegate work to coworker",
|
||||
@@ -140,7 +145,7 @@ class ToolUsage:
|
||||
|
||||
if calling.arguments:
|
||||
try:
|
||||
acceptable_args = tool.args_schema.schema()["properties"].keys() # type: ignore # Item "None" of "type[BaseModel] | None" has no attribute "schema"
|
||||
acceptable_args = tool.args_schema.schema()["properties"].keys() # type: ignore # Item "None" of "type[BaseModel] | None" has no attribute "schema"
|
||||
arguments = {
|
||||
k: v
|
||||
for k, v in calling.arguments.items()
|
||||
@@ -152,7 +157,7 @@ class ToolUsage:
|
||||
arguments = calling.arguments
|
||||
result = tool._run(**arguments)
|
||||
else:
|
||||
arguments = calling.arguments.values() # type: ignore # Incompatible types in assignment (expression has type "dict_values[str, Any]", variable has type "dict[str, Any]")
|
||||
arguments = calling.arguments.values() # type: ignore # Incompatible types in assignment (expression has type "dict_values[str, Any]", variable has type "dict[str, Any]")
|
||||
result = tool._run(*arguments)
|
||||
else:
|
||||
result = tool._run()
|
||||
@@ -179,9 +184,6 @@ class ToolUsage:
|
||||
|
||||
if self.tools_handler:
|
||||
should_cache = True
|
||||
original_tool = next(
|
||||
(ot for ot in self.original_tools if ot.name == tool.name), None
|
||||
)
|
||||
if (
|
||||
hasattr(original_tool, "cache_function")
|
||||
and original_tool.cache_function # type: ignore # Item "None" of "Any | None" has no attribute "cache_function"
|
||||
@@ -201,14 +203,29 @@ class ToolUsage:
|
||||
llm=self.function_calling_llm,
|
||||
tool_name=tool.name,
|
||||
attempts=self._run_attempts,
|
||||
)
|
||||
result = self._format_result(result=result) # type: ignore # "_format_result" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
)
|
||||
result = self._format_result(result=result) # type: ignore # "_format_result" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
data = {
|
||||
"result": result,
|
||||
"tool_name": tool.name,
|
||||
"tool_args": calling.arguments,
|
||||
}
|
||||
|
||||
if (
|
||||
hasattr(original_tool, "result_as_answer")
|
||||
and original_tool.result_as_answer # type: ignore # Item "None" of "Any | None" has no attribute "cache_function"
|
||||
):
|
||||
result_as_answer = original_tool.result_as_answer # type: ignore # Item "None" of "Any | None" has no attribute "result_as_answer"
|
||||
data["result_as_answer"] = result_as_answer
|
||||
|
||||
self.agent.tools_results.append(data)
|
||||
|
||||
return result # type: ignore # No return value expected
|
||||
|
||||
def _format_result(self, result: Any) -> None:
|
||||
self.task.used_tools += 1
|
||||
if self._should_remember_format(): # type: ignore # "_should_remember_format" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
result = self._remember_format(result=result) # type: ignore # "_remember_format" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
result = self._remember_format(result=result) # type: ignore # "_remember_format" of "ToolUsage" does not return a value (it only ever returns None)
|
||||
return result
|
||||
|
||||
def _should_remember_format(self) -> None:
|
||||
@@ -303,7 +320,7 @@ class ToolUsage:
|
||||
Example:
|
||||
{"tool_name": "tool name", "arguments": {"arg_name1": "value", "arg_name2": 2}}""",
|
||||
),
|
||||
max_attemps=1,
|
||||
max_attempts=1,
|
||||
)
|
||||
calling = converter.to_pydantic()
|
||||
|
||||
|
||||
@@ -16,8 +16,8 @@
|
||||
"format_without_tools": "\nSorry, I didn't use the right format. I MUST either use a tool (among the available ones), OR give my best final answer.\nI just remembered the expected format I must follow:\n\nQuestion: the input question you must answer\nThought: you should always think about what to do\nAction: the action to take, should be one of [{tool_names}]\nAction Input: the input to the action\nObservation: the result of the action\n... (this Thought/Action/Action Input/Observation can repeat N times)\nThought: I now can give a great answer\nFinal Answer: my best complete final answer to the task\nYour final answer must be the great and the most complete as possible, it must be outcome described\n\n",
|
||||
"task_with_context": "{task}\n\nThis is the context you're working with:\n{context}",
|
||||
"expected_output": "\nThis is the expect criteria for your final answer: {expected_output} \n you MUST return the actual complete content as the final answer, not a summary.",
|
||||
"human_feedback": "You got human feedback on your work, re-avaluate it and give a new Final Answer when ready.\n {human_feedback}",
|
||||
"getting_input": "This is the agent final answer: {final_answer}\nPlease provide a feedback: "
|
||||
"human_feedback": "You got human feedback on your work, re-evaluate it and give a new Final Answer when ready.\n {human_feedback}",
|
||||
"getting_input": "This is the agent's final answer: {final_answer}\nPlease provide feedback: "
|
||||
},
|
||||
"errors": {
|
||||
"force_final_answer": "Tool won't be use because it's time to give your final answer. Don't use tools and just your absolute BEST Final answer.",
|
||||
|
||||
@@ -32,7 +32,7 @@ class Converter(OutputConverter):
|
||||
else:
|
||||
return self._create_chain().invoke({})
|
||||
except Exception as e:
|
||||
if current_attempt < self.max_attemps:
|
||||
if current_attempt < self.max_attempts:
|
||||
return self.to_pydantic(current_attempt + 1)
|
||||
return ConverterError(
|
||||
f"Failed to convert text into a pydantic model due to the following error: {e}"
|
||||
@@ -46,7 +46,7 @@ class Converter(OutputConverter):
|
||||
else:
|
||||
return json.dumps(self._create_chain().invoke({}).model_dump())
|
||||
except Exception:
|
||||
if current_attempt < self.max_attemps:
|
||||
if current_attempt < self.max_attempts:
|
||||
return self.to_json(current_attempt + 1)
|
||||
return ConverterError("Failed to convert text into JSON.")
|
||||
|
||||
@@ -56,7 +56,7 @@ class Converter(OutputConverter):
|
||||
|
||||
inst = Instructor(
|
||||
llm=self.llm,
|
||||
max_attemps=self.max_attemps,
|
||||
max_attempts=self.max_attempts,
|
||||
model=self.model,
|
||||
content=self.text,
|
||||
instructions=self.instructions,
|
||||
|
||||
src/crewai/utilities/crew_json_encoder.py (new file, 17 lines added)
@@ -0,0 +1,17 @@
from datetime import datetime
import json
from uuid import UUID
from pydantic import BaseModel


class CrewJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, UUID):
            return str(obj)
        if isinstance(obj, BaseModel):
            return obj.model_dump()
        if hasattr(obj, "__dict__"):
            return obj.__dict__
        return str(obj)
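A quick sketch of how the new encoder can be used when dumping execution logs; the log dict values are illustrative.

import json
import uuid
from datetime import datetime

from crewai.utilities.crew_json_encoder import CrewJSONEncoder

log = {"task_id": uuid.uuid4(), "timestamp": datetime.now()}
print(json.dumps(log, indent=2, cls=CrewJSONEncoder))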
@@ -5,9 +5,9 @@ from pydantic import BaseModel, Field
|
||||
|
||||
from crewai.utilities import Converter
|
||||
from crewai.utilities.pydantic_schema_parser import PydanticSchemaParser
|
||||
|
||||
agentops = None
|
||||
try:
|
||||
import agentops
|
||||
from agentops import track_agent
|
||||
except ImportError:
|
||||
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import os
|
||||
import pickle
|
||||
from datetime import datetime
|
||||
import json
|
||||
|
||||
from crewai.utilities.crew_json_encoder import CrewJSONEncoder
|
||||
|
||||
|
||||
class FileHandler:
|
||||
@@ -31,9 +34,8 @@ class PickleHandler:
|
||||
- file_name (str): The name of the file for saving and loading data.
|
||||
"""
|
||||
self.file_path = os.path.join(os.getcwd(), file_name)
|
||||
self._initialize_file()
|
||||
|
||||
def _initialize_file(self) -> None:
|
||||
def initialize_file(self) -> None:
|
||||
"""
|
||||
Initialize the file with an empty dictionary if it does not exist or is empty.
|
||||
"""
|
||||
@@ -67,3 +69,37 @@ class PickleHandler:
|
||||
return {} # Return an empty dictionary if the file is empty or corrupted
|
||||
except Exception:
|
||||
raise # Raise any other exceptions that occur during loading
|
||||
|
||||
|
||||
class TaskOutputJsonHandler:
    def __init__(self, file_name: str) -> None:
        self.file_path = os.path.join(os.getcwd(), file_name)

    def initialize_file(self) -> None:
        if not os.path.exists(self.file_path) or os.path.getsize(self.file_path) == 0:
            with open(self.file_path, "w") as file:
                json.dump([], file)

    def append(self, log) -> None:
        if not os.path.exists(self.file_path) or os.path.getsize(self.file_path) == 0:
            # Initialize the file with an empty list if it doesn't exist or is empty
            with open(self.file_path, "w") as file:
                json.dump([], file)
        with open(self.file_path, "r+") as file:
            try:
                file_data = json.load(file)
            except json.JSONDecodeError:
                # If the file contains invalid JSON, initialize it with an empty list
                file_data = []

            file_data.append(log)
            file.seek(0)
            json.dump(file_data, file, indent=2, cls=CrewJSONEncoder)
            file.truncate()

    def load(self) -> list:
        if not os.path.exists(self.file_path) or os.path.getsize(self.file_path) == 0:
            return []

        with open(self.file_path, "r") as file:
            return json.load(file)
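A minimal sketch of the handler above, as the Crew uses it for crew_tasks_output.json; the file name and log dict are illustrative.

handler = TaskOutputJsonHandler("crew_tasks_output.json")
handler.initialize_file()
handler.append({"task_id": "123", "output": {"raw_output": "Howdy!"}})
print(handler.load())  # -> list of appended log entries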
@@ -1,7 +1,7 @@
|
||||
from datetime import datetime
|
||||
|
||||
from crewai.utilities.printer import Printer
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class Logger:
|
||||
_printer = Printer()
|
||||
|
||||
@@ -8,6 +8,8 @@ class Printer:
|
||||
self._print_bold_green(content)
|
||||
elif color == "bold_purple":
|
||||
self._print_bold_purple(content)
|
||||
elif color == "bold_blue":
|
||||
self._print_bold_blue(content)
|
||||
else:
|
||||
print(content)
|
||||
|
||||
@@ -22,3 +24,6 @@ class Printer:
|
||||
|
||||
def _print_red(self, content):
|
||||
print("\033[91m {}\033[00m".format(content))
|
||||
|
||||
def _print_bold_blue(self, content):
|
||||
print("\033[1m\033[94m {}\033[00m".format(content))
|
||||
|
||||
@@ -4,10 +4,6 @@ from unittest import mock
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from langchain.tools import tool
|
||||
from langchain_core.exceptions import OutputParserException
|
||||
from langchain_openai import ChatOpenAI
|
||||
|
||||
from crewai import Agent, Crew, Task
|
||||
from crewai.agents.cache import CacheHandler
|
||||
from crewai.agents.executor import CrewAgentExecutor
|
||||
@@ -15,6 +11,9 @@ from crewai.agents.parser import CrewAgentParser
|
||||
from crewai.tools.tool_calling import InstructorToolCalling
|
||||
from crewai.tools.tool_usage import ToolUsage
|
||||
from crewai.utilities import RPMController
|
||||
from langchain.tools import tool
|
||||
from langchain_core.exceptions import OutputParserException
|
||||
from langchain_openai import ChatOpenAI
|
||||
|
||||
|
||||
def test_agent_creation():
|
||||
@@ -631,8 +630,8 @@ def test_agent_use_specific_tasks_output_as_context(capsys):
|
||||
|
||||
crew = Crew(agents=[agent1, agent2], tasks=tasks)
|
||||
result = crew.kickoff()
|
||||
assert "bye" not in result.lower()
|
||||
assert "hi" in result.lower() or "hello" in result.lower()
|
||||
assert "bye" not in result.raw_output().lower()
|
||||
assert "hi" in result.raw_output().lower() or "hello" in result.raw_output().lower()
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
@@ -723,6 +722,74 @@ def test_agent_count_formatting_error():
|
||||
mock_count_errors.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_tool_result_as_answer_is_the_final_answer_for_the_agent():
|
||||
from crewai_tools import BaseTool
|
||||
|
||||
class MyCustomTool(BaseTool):
|
||||
name: str = "Get Greetings"
|
||||
description: str = "Get a random greeting back"
|
||||
|
||||
def _run(self) -> str:
|
||||
return "Howdy!"
|
||||
|
||||
    agent1 = Agent(
        role="Data Scientist",
        goal="Produce amazing reports on AI",
        backstory="You work with data and AI",
        tools=[MyCustomTool(result_as_answer=True)],
    )

    essay = Task(
        description="Write and then review a small paragraph on AI until it's AMAZING. But first use the `Get Greetings` tool to get a greeting.",
        expected_output="The final paragraph with the full review on AI and no greeting.",
        agent=agent1,
    )
    tasks = [essay]
    crew = Crew(agents=[agent1], tasks=tasks)
|
||||
|
||||
result = crew.kickoff()
|
||||
print("RESULT: ", result.raw_output())
|
||||
assert result.raw_output() == "Howdy!"
|
||||
|
||||
|
||||
@pytest.mark.vcr(filter_headers=["authorization"])
|
||||
def test_tool_usage_information_is_appended_to_agent():
|
||||
from crewai_tools import BaseTool
|
||||
|
||||
class MyCustomTool(BaseTool):
|
||||
name: str = "Decide Greetings"
|
||||
description: str = "Decide what is the appropriate greeting to use"
|
||||
|
||||
def _run(self) -> str:
|
||||
return "Howdy!"
|
||||
|
||||
agent1 = Agent(
|
||||
role="Friendly Neighbor",
|
||||
goal="Make everyone feel welcome",
|
||||
backstory="You are the friendly neighbor",
|
||||
tools=[MyCustomTool(result_as_answer=True)],
|
||||
)
|
||||
|
||||
greeting = Task(
|
||||
description="Say an appropriate greeting.",
|
||||
expected_output="The greeting.",
|
||||
agent=agent1,
|
||||
)
|
||||
tasks = [greeting]
|
||||
crew = Crew(agents=[agent1], tasks=tasks)
|
||||
|
||||
crew.kickoff()
|
||||
assert agent1.tools_results == [
|
||||
{
|
||||
"result": "Howdy!",
|
||||
"tool_name": "Decide Greetings",
|
||||
"tool_args": {},
|
||||
"result_as_answer": True,
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def test_agent_llm_uses_token_calc_handler_with_llm_has_model_name():
|
||||
agent1 = Agent(
|
||||
role="test role",
|
||||
|
||||
(3 file diffs suppressed because they are too large)
@@ -1,314 +0,0 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: '{"messages": [{"content": "You are test role. test backstory\nYour personal
|
||||
goal is: test goalTo give my best complete final answer to the task use the
|
||||
exact following format:\n\nThought: I now can give a great answer\nFinal Answer:
|
||||
my best complete final answer to the task.\nYour final answer must be the great
|
||||
and the most complete as possible, it must be outcome described.\n\nI MUST use
|
||||
these formats, my job depends on it!\nCurrent Task: just say hi!\n\nThis is
|
||||
the expect criteria for your final answer: your greeting \n you MUST return
|
||||
the actual complete content as the final answer, not a summary.\n\nBegin! This
|
||||
is VERY important to you, use the tools available and give your best Final Answer,
|
||||
your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
|
||||
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '853'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.34.0
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.34.0
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.12.3
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
now"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
can"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
give"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
great"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
Hi"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9gGvixvu2sSTjML4oN3fsoMeTHbew","object":"chat.completion.chunk","created":1719861882,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89c8c71ccad81823-ATL
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Mon, 01 Jul 2024 19:24:42 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=uU.2MR0L4Mv3xs4DzFlWOQLVId1dJXQBlWffhr9mqxU-1719861882-1.0.1.1-JSKN2_O9iYj8QCZjy0IGiunZxvXimz5Kzv5wQJedVua5E6WIl1UvP.wguXbK0cds7ayJReYnR8v8oAN2rmtnNQ;
path=/; expires=Mon, 01-Jul-24 19:54:42 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=yc5Q7WKbO5zoiGNQx86HpHNM3HeXi2HxCxw31lL_UuU-1719861882665-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '86'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999808'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_25d95f35048bf71e28d73fbed6576a6c
status:
code: 200
message: OK
- request:
body: '{"messages": [{"content": "You are test role. test backstory\nYour personal
goal is: test goalTo give my best complete final answer to the task use the
exact following format:\n\nThought: I now can give a great answer\nFinal Answer:
my best complete final answer to the task.\nYour final answer must be the great
and the most complete as possible, it must be outcome described.\n\nI MUST use
these formats, my job depends on it!\nCurrent Task: just say hello!\n\nThis
is the expect criteria for your final answer: your greeting \n you MUST return
the actual complete content as the final answer, not a summary.\n\nThis is the
context you''re working with:\nHi!\n\nBegin! This is VERY important to you,
use the tools available and give your best Final Answer, your job depends on
it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
content-length:
- '905'
content-type:
- application/json
cookie:
- __cf_bm=uU.2MR0L4Mv3xs4DzFlWOQLVId1dJXQBlWffhr9mqxU-1719861882-1.0.1.1-JSKN2_O9iYj8QCZjy0IGiunZxvXimz5Kzv5wQJedVua5E6WIl1UvP.wguXbK0cds7ayJReYnR8v8oAN2rmtnNQ;
_cfuvid=yc5Q7WKbO5zoiGNQx86HpHNM3HeXi2HxCxw31lL_UuU-1719861882665-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.34.0
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.34.0
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.12.3
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
now"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
can"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
great"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
Hello"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9gGvjRHciTPrlyXWRGu5z5C56L10c","object":"chat.completion.chunk","created":1719861883,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89c8c7202e0f1823-ATL
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Mon, 01 Jul 2024 19:24:43 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '82'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999794'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_754b5067e8f56d5c1182dc0f57be0e45
status:
code: 200
message: OK
version: 1
258
tests/cassettes/test_custom_converter_cls.yaml
Normal file
@@ -0,0 +1,258 @@
interactions:
- request:
body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
final answer to the task use the exact following format:\n\nThought: I now can
give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
final answer must be the great and the most complete as possible, it must be
outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
Task: Give me an integer score between 1-5 for the following title: ''The impact
of AI in the future of work''\n\nThis is the expect criteria for your final
answer: The score of the title. \n you MUST return the actual complete content
as the final answer, not a summary.\n\nBegin! This is VERY important to you,
use the tools available and give your best Final Answer, your job depends on
it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
"stream": true, "temperature": 0.7}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '997'
content-type:
- application/json
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.35.10
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.35.10
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: 'data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
I"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
now"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
can"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
give"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
a"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
great"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
Answer"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
data: {"id":"chatcmpl-9hrsMKHuOkxqftWK9DtuC10VCJ17t","object":"chat.completion.chunk","created":1720242230,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
data: [DONE]
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89ed0cf0dc05741a-MIA
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Sat, 06 Jul 2024 05:03:50 GMT
Server:
- cloudflare
Set-Cookie:
- __cf_bm=JI76H4xxreAnMx1JJoPragplAdYdjbDNA68Hr3Cs_0k-1720242230-1.0.1.1-oHSrtm.ejkvCiAHC11lg0MnvmopYZayTZRq09IcH2yh5BA6FyyufGH7Rm59BAz.gdZHc0izmjElXfLiu2bZ_jQ;
path=/; expires=Sat, 06-Jul-24 05:33:50 GMT; domain=.api.openai.com; HttpOnly;
Secure; SameSite=None
- _cfuvid=X4.n0cNP9j1jseIPV4H1aDJu2xrsAwcUI8rY0tbLc40-1720242230210-0.0.1.1-604800000;
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '71'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999772'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_8dc1d49d85fcf8e39601e32ca80abd6b
status:
code: 200
message: OK
- request:
body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
"I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4o", "tool_choice":
{"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
"function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
`ScoreOutput` with all the required parameters with correct types", "parameters":
{"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
["score"], "type": "object"}}}]}'
headers:
accept:
- application/json
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '519'
content-type:
- application/json
cookie:
- __cf_bm=JI76H4xxreAnMx1JJoPragplAdYdjbDNA68Hr3Cs_0k-1720242230-1.0.1.1-oHSrtm.ejkvCiAHC11lg0MnvmopYZayTZRq09IcH2yh5BA6FyyufGH7Rm59BAz.gdZHc0izmjElXfLiu2bZ_jQ;
_cfuvid=X4.n0cNP9j1jseIPV4H1aDJu2xrsAwcUI8rY0tbLc40-1720242230210-0.0.1.1-604800000
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.35.10
x-stainless-arch:
- arm64
x-stainless-async:
- 'false'
x-stainless-lang:
- python
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.35.10
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
- 3.11.7
method: POST
uri: https://api.openai.com/v1/chat/completions
response:
body:
string: !!binary |
H4sIAAAAAAAAA2xSS2/bMAy++1cIPNeF81pT34YBG9A13aHAgL5gKArtKJVFTaKBtkH++yDFi91g
PggEP34PkN5nQoDeQClAbSWr1pn8euvD6id9OP3V3C6bzZ2qd7eT1f3DykgFF5FB6x0q/se6VNQ6
g6zJHmHlUTJG1cnVtJjOp9NZkYCWNmgirXGczymPYF4s8smsJ25JKwxQiqdMCCH26Y0R7QbfoBRJ
JnVaDEE2COVpSAjwZGIHZAg6sLQMFwOoyDLamNp2xowAJjKVksYMxsdvP6qHPUljqsI2duX13ePN
w2/7HT/+8Lfd45cfYeR3lH53KVDdWXXazwg/9cszMyHAyjZx7xV5/NWx6/iMLgRI33QtWo7RYf8M
IQ4/Qzk/wKfRQ/a/+qWvDqe1Gmqcp3U42xLU2uqwrTzKkNJCYHJHiyj3ks7XfboIOE+t44rpFW0U
XPbXg+F/GcBFjzGxNCPOIuvjQXgPjG1Va9ugd16nU0LtqnlRLHG2vppcQ3bI/gIAAP//AwCtLU45
0wIAAA==
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89ed0cf40ebc741a-MIA
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Type:
- application/json
Date:
- Sat, 06 Jul 2024 05:03:50 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '186'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999969'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 0s
x-request-id:
- req_5da164d15ccb331864aeb5d3562969aa
status:
code: 200
message: OK
version: 1
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,82 +1,33 @@
|
||||
interactions:
|
||||
- request:
|
||||
body: !!binary |
|
||||
Cp4ICiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkS9QcKEgoQY3Jld2FpLnRl
|
||||
bGVtZXRyeRLeBwoQLUn1O+7mxvq8vHmZBVzP4hII4O2Vm5CqaQMqDENyZXcgQ3JlYXRlZDABOQj8
|
||||
EY+F/7gXQZhVFI+F/7gXShoKDmNyZXdhaV92ZXJzaW9uEggKBjAuMTYuM0oaCg5weXRob25fdmVy
|
||||
c2lvbhIICgYzLjExLjdKMQoHY3Jld19pZBImCiQxNGEyZTE0YS0yZmU3LTQxMTEtYWI4OS05YTRl
|
||||
YmI2MmQ2OGFKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKFQoNY3Jld19sYW5ndWFnZRIE
|
||||
CgJlbkoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRz
|
||||
EgIYAUrJAgoLY3Jld19hZ2VudHMSuQIKtgJbeyJpZCI6ICIyNWE2NzNmNy1kM2NkLTQxMWYtOWFh
|
||||
Ni0wODZlY2IzMTgyODUiLCAicm9sZSI6ICJTY29yZXIiLCAibWVtb3J5X2VuYWJsZWQ/IjogZmFs
|
||||
c2UsICJ2ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAxNSwgIm1heF9ycG0iOiBudWxsLCAi
|
||||
aTE4biI6ICJlbiIsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJn
|
||||
cHQtNFwiLCBcInRlbXBlcmF0dXJlXCI6IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0i
|
||||
LCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSoYBCgpj
|
||||
cmV3X3Rhc2tzEngKdlt7ImlkIjogIjYxMjIyYjU0LTFlNDYtNDhlYy1hNTkwLTU3Y2VkNTI1OTE0
|
||||
OSIsICJhc3luY19leGVjdXRpb24/IjogZmFsc2UsICJhZ2VudF9yb2xlIjogIlNjb3JlciIsICJ0
|
||||
b29sc19uYW1lcyI6IFtdfV1KKAoIcGxhdGZvcm0SHAoabWFjT1MtMTQuMy1hcm02NC1hcm0tNjRi
|
||||
aXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4zLjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRh
|
||||
cndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwgVmVyc2lvbiAyMy4zLjA6
|
||||
IFdlZCBEZWMgMjAgMjE6MzA6NTkgUFNUIDIwMjM7IHJvb3Q6eG51LTEwMDAyLjgxLjV+Ny9SRUxF
|
||||
QVNFX0FSTTY0X1Q2MDMwSgoKBGNwdXMSAhgMegIYAQ==
|
||||
headers:
|
||||
Accept:
|
||||
- '*/*'
|
||||
Accept-Encoding:
|
||||
- gzip, deflate, br
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '1057'
|
||||
Content-Type:
|
||||
- application/x-protobuf
|
||||
User-Agent:
|
||||
- OTel-OTLP-Exporter-Python/1.23.0
|
||||
method: POST
|
||||
uri: http://telemetry.crewai.com:4318/v1/traces
|
||||
response:
|
||||
body:
|
||||
string: "\n\0"
|
||||
headers:
|
||||
Content-Length:
|
||||
- '2'
|
||||
Content-Type:
|
||||
- application/x-protobuf
|
||||
Date:
|
||||
- Sat, 02 Mar 2024 16:30:10 GMT
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "You are Scorer. You''re an expert
|
||||
scorer, specialized in scoring titles.\nYour personal goal is: Score the titleTo
|
||||
give my best complete final answer to the task use the exact following format:\n\nThought:
|
||||
I now can give a great answer\nFinal Answer: my best complete final answer to
|
||||
the task.\nYour final answer must be the great and the most complete as possible,
|
||||
it must be outcome described.\n\nI MUST use these formats, my job depends on
|
||||
it!\n\nThought: \n\nCurrent Task: Give me an integer score between 1-5 for the
|
||||
following title: ''The impact of AI in the future of work''\n\nThis is the expect
|
||||
criteria for your final answer: The score of the title. \n you MUST return the
|
||||
actual complete content as the final answer, not a summary.\n\nBegin! This is
|
||||
VERY important to you, use the tools available and give your best Final Answer,
|
||||
your job depends on it!\n\nThought: \n"}], "model": "gpt-4", "n": 1, "stop":
|
||||
["\nObservation"], "stream": true, "temperature": 0.7}'
|
||||
body: '{"messages": [{"content": "You are Scorer. You''re an expert scorer, specialized
|
||||
in scoring titles.\nYour personal goal is: Score the titleTo give my best complete
|
||||
final answer to the task use the exact following format:\n\nThought: I now can
|
||||
give a great answer\nFinal Answer: my best complete final answer to the task.\nYour
|
||||
final answer must be the great and the most complete as possible, it must be
|
||||
outcome described.\n\nI MUST use these formats, my job depends on it!\nCurrent
|
||||
Task: Give me an integer score between 1-5 for the following title: ''The impact
|
||||
of AI in the future of work''\n\nThis is the expect criteria for your final
|
||||
answer: The score of the title. \n you MUST return the actual complete content
|
||||
as the final answer, not a summary.\n\nBegin! This is VERY important to you,
|
||||
use the tools available and give your best Final Answer, your job depends on
|
||||
it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"],
|
||||
"stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1012'
|
||||
- '997'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.12.0
|
||||
- OpenAI/Python 1.35.10
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
@@ -86,7 +37,7 @@ interactions:
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.12.0
|
||||
- 1.35.10
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
@@ -95,424 +46,64 @@ interactions:
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
string: 'data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
title"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
''"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
impact"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
AI"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
in"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
future"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
work"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
highly"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
relevant"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
in"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
today"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''s"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
world"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
where"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
artificial"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
intelligence"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
increasingly"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
becoming"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
part"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
our"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
daily"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
lives"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
It"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
future"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"-focused"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
topic"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
hint"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"ing"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
at"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
exploration"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
how"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
AI"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
could"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
potentially"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
transform"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
workforce"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
title"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
is"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
clear"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
concise"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
and"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
thought"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"-pro"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"v"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"oking"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
However"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
it"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
could"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
be"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
more"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
engaging"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
if"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
it"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
posed"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
question"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
or"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
suggested"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
particular"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
stance"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
or"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
perspective"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
on"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
issue"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
\n\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
would"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
now"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
can"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
give"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
title"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
score"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
great"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
out"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"5"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-8yMXQdHZBcMpwt1od7QnLwjVHeuWA","object":"chat.completion.chunk","created":1709397008,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
data: {"id":"chatcmpl-9hEEAWWzwQR7GSV8yjob2TVqUZZFF","object":"chat.completion.chunk","created":1720089822,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
@@ -523,83 +114,76 @@ interactions:
      [response headers diff: CF-Cache-Status DYNAMIC; CF-RAY 85e2c5046bf300ef-GRU -> 89de840c8999da1f-MIA; the Cache-Control "no-cache, must-revalidate" line is dropped; Connection keep-alive; Content-Type text/event-stream -> "text/event-stream; charset=utf-8"; Date Sat, 02 Mar 2024 16:30:08 GMT -> Thu, 04 Jul 2024 10:43:42 GMT; Set-Cookie __cf_bm/_cfuvid values rotated; Transfer-Encoding chunked; openai-model gpt-4-0613; openai-organization crewai-iuxna1; openai-processing-ms '250' -> '120'; strict-transport-security max-age=15724800 -> max-age=31536000; x-ratelimit-limit-requests '10000'; x-ratelimit-limit-tokens '300000' -> '16000000'; x-ratelimit-remaining-tokens '299768' -> '15999772'; x-ratelimit-reset-tokens 46ms -> 0s; x-request-id req_c05576522c6a556b562ddcf905cd08ee -> req_16b0326c59b800232bd3b81982efca66]
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "I would give the title a score
      of 4 out of 5."}, {"role": "system", "content": "I''m gonna convert this raw
      text into valid JSON."}], "model": "gpt-4", "tool_choice": {"type": "function",
      "function": {"name": "ScoreOutput"}}, "tools": [{"type": "function", "function":
      {"name": "ScoreOutput", "description": "Correctly extracted `ScoreOutput` with
      all the required parameters with correct types", "parameters": {"properties":
      {"score": {"title": "Score", "type": "integer"}}, "required": ["score"], "type":
      "object"}}}]}'
      [replaced by the same request with user content "4" and "model": "gpt-4o"; the
      ScoreOutput tool definition is unchanged]
    headers:
      [request headers diff: accept application/json; accept-encoding "gzip, deflate, br"
      -> "gzip, deflate"; connection keep-alive; content-length '562' -> '519';
      content-type application/json; cookie __cf_bm/_cfuvid values rotated; host
      api.openai.com; user-agent OpenAI/Python 1.12.0 -> 1.35.10; x-stainless-arch arm64]
@@ -609,7 +193,7 @@ interactions:
      [request headers diff, continued: x-stainless-os MacOS; x-stainless-package-version
      '1.12.0' -> '1.35.10'; x-stainless-runtime CPython]
@@ -619,60 +203,55 @@ interactions:
  response:
    body:
      string: [!!binary bodies omitted: the old cassette stores a brotli-compressed
        payload, the new one a gzip-compressed payload (Content-Encoding br -> gzip)]
    headers:
      [response headers diff: CF-RAY 85e2c51b9f7900ef-GRU -> 89de84103ab8da1f-MIA; the
      Cache-Control line is dropped; Date Sat, 02 Mar 2024 16:30:12 GMT -> Thu, 04 Jul
      2024 10:43:43 GMT; openai-model gpt-4-0613; openai-processing-ms '518' -> '235';
      strict-transport-security max-age=15724800 -> max-age=31536000;
      x-ratelimit-limit-tokens '300000' -> '16000000'; x-ratelimit-remaining-tokens
      '299958' -> '15999969'; x-ratelimit-reset-tokens 8ms -> 0s; x-request-id
      req_6cf57525fad025cb1fe9bddb5d669312 -> req_5d283f799cb8d11c8280f1c07e4132a1]
    status:
      code: 200
      message: OK
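The `ScoreOutput` tool definition recorded in the request above looks like a Pydantic-generated JSON schema. As a point of reference only (this sketch is not code taken from this repository), a model like the following produces an equivalent `parameters` object:

```python
# Minimal sketch (assumption: not code from this repository) of a Pydantic model whose
# JSON schema matches the "ScoreOutput" tool recorded in the cassette above.
from pydantic import BaseModel


class ScoreOutput(BaseModel):
    score: int


# Yields {"properties": {"score": {"title": "Score", "type": "integer"}},
#         "required": ["score"], "title": "ScoreOutput", "type": "object"},
# i.e. the same "properties"/"required"/"type" seen in the recorded "parameters".
print(ScoreOutput.model_json_schema())
```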
[diffs of three additional cassette files suppressed by the viewer as too large]
@@ -1,82 +1,33 @@
interactions:
- request:
    body: [!!binary OTLP protobuf trace-export payload omitted, Content-Length '1057']
    headers:
      [Accept '*/*'; Accept-Encoding gzip, deflate, br; Connection keep-alive;
      Content-Type application/x-protobuf; User-Agent OTel-OTLP-Exporter-Python/1.23.0]
    method: POST
    uri: http://telemetry.crewai.com:4318/v1/traces
  response:
    body:
      string: "\n\0"
    headers:
      [Content-Length '2'; Content-Type application/x-protobuf; Date Sat, 02 Mar 2024
      16:30:45 GMT]
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "You are Scorer. You''re an expert
      scorer, specialized in scoring titles.\nYour personal goal is: Score the titleTo
      give my best complete final answer to the task use the exact following format:\n\nThought:
      I now can give a great answer\nFinal Answer: my best complete final answer to
      the task.\nYour final answer must be the great and the most complete as possible,
      it must be outcome described.\n\nI MUST use these formats, my job depends on
      it!\n\nThought: \n\nCurrent Task: Give me an integer score between 1-5 for the
      following title: ''The impact of AI in the future of work''\n\nThis is the expect
      criteria for your final answer: The score of the title. \n you MUST return the
      actual complete content as the final answer, not a summary.\n\nBegin! This is
      VERY important to you, use the tools available and give your best Final Answer,
      your job depends on it!\n\nThought: \n"}], "model": "gpt-4", "n": 1, "stop":
      ["\nObservation"], "stream": true, "temperature": 0.7}'
      [replaced by the same prompt with the message keys reordered ("content" before
      "role"), the extra "\n\nThought: \n" before "Current Task" reduced to a single
      "\n", the trailing "Thought: \n" written as "Thought:\n", and "model": "gpt-4o";
      "n", "stop", "stream" and "temperature" are unchanged]
    headers:
      [request headers diff: accept application/json; accept-encoding "gzip, deflate, br"
      -> "gzip, deflate"; connection keep-alive; content-length '1012' -> '997';
      content-type application/json; host api.openai.com; user-agent OpenAI/Python
      1.12.0 -> 1.35.10; x-stainless-arch arm64]
@@ -86,7 +37,7 @@ interactions:
      [x-stainless-os MacOS; x-stainless-package-version '1.12.0' -> '1.35.10';
      x-stainless-runtime CPython]
@@ -95,265 +46,64 @@ interactions:
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      [streamed SSE body diff: the gpt-4-0613 stream (chatcmpl-8yMY0ocCeiaktjmCcObeikdUZPfLi,
      created 1709397044), which emits a reasoning preamble ("Considering the relevance
      of the topic, the straightforwardness of the title and its ability to captivate
      attention due to the current global interest in AI and its future implications ...")
      followed by "Final Answer: The title 'The impact of AI in the future of work'
      scores a 4 out of 5.", is replaced by the gpt-4o-2024-05-13 stream
      (chatcmpl-9hED3nsqYzSiiorjVCmuS96jevbba, created 1720089753), which emits
      "Thought: I now can give a great answer\nFinal Answer: 4"; both streams end with
      finish_reason "stop" followed by "data: [DONE]"]
@@ -364,60 +114,53 @@ interactions:
      [response headers diff: CF-Cache-Status DYNAMIC; CF-RAY 85e2c5e93ad900d7-GRU ->
      89de825e5af34c0d-MIA; the Cache-Control line is dropped; Content-Type
      text/event-stream -> "text/event-stream; charset=utf-8"; Date Sat, 02 Mar 2024
      16:30:45 GMT -> Thu, 04 Jul 2024 10:42:33 GMT; Set-Cookie __cf_bm/_cfuvid values
      rotated; openai-model gpt-4-0613; openai-processing-ms '234' -> '135';
      strict-transport-security max-age=15724800 -> max-age=31536000;
      x-ratelimit-limit-tokens '300000' -> '16000000'; x-ratelimit-remaining-tokens
      '299768' -> '15999772'; x-ratelimit-reset-tokens 46ms -> 0s; x-request-id
      req_42d5dbd362d2ba837a2d9a0303d93854 -> req_ae342ee6e026c54b420e69ccb8235272]
    status:
      code: 200
      message: OK
- request:
    body: '{"messages": [{"role": "user", "content": "The title ''The impact of AI
      in the future of work'' scores a 4 out of 5."}, {"role": "system", "content":
      "I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4", "tool_choice":
      {"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [... same
      ScoreOutput tool definition as in the request above ...]}'
      [replaced by the same request with user content "4" and "model": "gpt-4o"]
@@ -427,20 +170,20 @@ interactions:
      [request headers diff: accept application/json; accept-encoding "gzip, deflate, br"
      -> "gzip, deflate"; connection keep-alive; content-length '588' -> '519';
      content-type application/json; cookie __cf_bm/_cfuvid values rotated; host
      api.openai.com; user-agent OpenAI/Python 1.12.0 -> 1.35.10; x-stainless-arch arm64]
@@ -450,7 +193,7 @@ interactions:
      [x-stainless-os MacOS; x-stainless-package-version '1.12.0' -> '1.35.10';
      x-stainless-runtime CPython]
@@ -460,60 +203,55 @@ interactions:
  response:
    body:
      string: [!!binary bodies omitted: brotli-compressed payload in the old cassette,
        gzip-compressed payload in the new one (Content-Encoding br -> gzip)]
    headers:
      [response headers diff: CF-RAY 85e2c5fe7ccc00d7-GRU -> 89de8261dc6c4c0d-MIA; the
      Cache-Control line is dropped; Date Sat, 02 Mar 2024 16:30:48 GMT -> Thu, 04 Jul
      2024 10:42:34 GMT; openai-model gpt-4-0613; openai-processing-ms '516' -> '193';
      strict-transport-security max-age=15724800 -> max-age=31536000;
      x-ratelimit-limit-tokens '300000' -> '16000000'; x-ratelimit-remaining-tokens
      '299951' -> '15999969'; x-ratelimit-reset-tokens 9ms -> 0s; x-request-id
      req_f5cc008a441e72f5c41ca547cac30f03 -> req_7cf743796bf4a52626b923135ff8f936]
    status:
      code: 200
      message: OK
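These fixtures follow the vcrpy cassette layout (an `interactions:` list of recorded request/response pairs), so swapping gpt-4 for gpt-4o in the tests means re-recording the cassettes. A minimal sketch, assuming pytest with vcrpy and an illustrative cassette name, directory, and crew helper, of how a test would replay one of these recordings:

```python
# Minimal sketch only: the cassette name, directory, and `build_scorer_crew` helper are
# illustrative assumptions, not code taken from this repository.
import vcr

recorder = vcr.VCR(
    cassette_library_dir="tests/cassettes",  # where cassette YAML files like these live
    record_mode="once",                      # replay if the cassette exists, record otherwise
    filter_headers=["authorization"],        # keep API keys out of the recorded YAML
)


@recorder.use_cassette("test_scorer_crew.yaml")
def test_scorer_crew():
    crew = build_scorer_crew()   # hypothetical helper that assembles the scoring crew
    result = crew.kickoff()      # HTTP calls are served from the cassette, not the network
    assert "4" in str(result)
```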
@@ -1,168 +1,33 @@
interactions:
- request:
    body: [!!binary OTLP protobuf trace-export payload omitted, Content-Length '5930']
    headers:
      [Accept '*/*'; Accept-Encoding gzip, deflate, br; Connection keep-alive;
      Content-Type application/x-protobuf; User-Agent OTel-OTLP-Exporter-Python/1.24.0]
    method: POST
    uri: https://telemetry.crewai.com:4319/v1/traces
  response:
    body:
      string: "\n\0"
    headers:
      [Content-Length '2'; Content-Type application/x-protobuf; Date Tue, 14 May 2024
      01:26:13 GMT]
    status:
      code: 200
      message: OK
- request:
    body: [the same Scorer scoring prompt on both sides of the diff; the change reorders
      the message keys ("content" before "role") and sets "model": "gpt-4" -> "gpt-4o";
      "n", "stop", "stream" and "temperature" are unchanged]
    headers:
      [request headers diff: accept application/json; accept-encoding "gzip, deflate, br"
      -> "gzip, deflate"; connection keep-alive; content-length '996' -> '997';
      content-type application/json; host api.openai.com; user-agent OpenAI/Python
      1.29.0 -> 1.35.10; x-stainless-arch arm64]
@@ -172,7 +37,7 @@ interactions:
      [x-stainless-os MacOS; x-stainless-package-version '1.29.0' -> '1.35.10';
      x-stainless-runtime CPython]
@@ -181,268 +46,64 @@ interactions:
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
string: 'data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"After"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
considering"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
relevance"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
depth"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
and"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
potential"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
interest"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
in"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
topic"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
as"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
well"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
as"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
clarity"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
and"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
simplicity"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
title"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
''"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
impact"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
AI"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
in"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
future"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
work"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''.\n\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
Based"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
on"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
my"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
evaluation"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":","},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
now"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
can"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
give"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
title"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
''"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"The"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
impact"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
AI"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
in"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
future"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
work"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"''"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
a"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
score"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
great"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{"content":"4"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
out"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
of"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"
|
||||
"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"5"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{"content":"."},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9ObDhPtGi5k6ALX2jaFMR0vwXhSx0","object":"chat.completion.chunk","created":1715649973,"model":"gpt-4-0613","system_fingerprint":null,"choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
data: {"id":"chatcmpl-9hry2om1JBkreHpDHFbfD2YDtg2oA","object":"chat.completion.chunk","created":1720242582,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_d576307f90","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
@@ -453,20 +114,20 @@ interactions:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 8837194f3860a686-MIA
|
||||
- 89ed158b8bf0a566-MIA
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream; charset=utf-8
|
||||
Date:
|
||||
- Tue, 14 May 2024 01:26:14 GMT
|
||||
- Sat, 06 Jul 2024 05:09:42 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=o_N1eUBbqWWHvbE622tXs5BC9yN1X1bxp3npQXI46JU-1715649974-1.0.1.1-ALOCo.oJW08V4.F5WqVArJDbxakTERtLPfwUlDjTYrrg_WiJox.Pw_n.PnDfEdxa52BbaPI8M80h2S3wdJy2sw;
|
||||
path=/; expires=Tue, 14-May-24 01:56:14 GMT; domain=.api.openai.com; HttpOnly;
|
||||
- __cf_bm=5C3MG9ni0I5bZoHGzfXZq16obGaD1INR3_.wX4CRPAk-1720242582-1.0.1.1-fZiD6L1FdBiC0gqcmBK9_IaHhbHPQi4z04fxYQtoDc9KbYqPvxm_sxP_RkuZX_AyPkHgu85IRq9E6MUAZJGzwQ;
|
||||
path=/; expires=Sat, 06-Jul-24 05:39:42 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=vYif.qp5YND_OTGCG3IgxPXT3sqDTSpeVEIUapP.n.k-1715649974367-0.0.1.1-604800000;
|
||||
- _cfuvid=YP7Z3XnHPKQDU2nOhrLzkxr8InOv42HLWchJd1ogneQ-1720242582534-0.0.1.1-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
@@ -475,55 +136,54 @@ interactions:
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '510'
|
||||
- '90'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
- max-age=31536000; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
- '16000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9997'
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299098'
|
||||
- '15999772'
|
||||
x-ratelimit-reset-requests:
|
||||
- 13ms
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 180ms
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_26f0f1e02a54a0aaf7e8b96ba7d56837
|
||||
- req_36d283adbca77945609f0da658047ba0
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"role": "user", "content": "Based on my evaluation, I give
|
||||
the title ''The impact of AI in the future of work'' a score of 4 out of 5."},
|
||||
{"role": "system", "content": "I''m gonna convert this raw text into valid JSON."}],
|
||||
"model": "gpt-4", "tool_choice": {"type": "function", "function": {"name": "ScoreOutput"}},
|
||||
"tools": [{"type": "function", "function": {"name": "ScoreOutput", "description":
|
||||
"Correctly extracted `ScoreOutput` with all the required parameters with correct
|
||||
types", "parameters": {"properties": {"score": {"title": "Score", "type": "integer"}},
|
||||
"required": ["score"], "type": "object"}}}]}'
|
||||
body: '{"messages": [{"role": "user", "content": "4"}, {"role": "system", "content":
|
||||
"I''m gonna convert this raw text into valid JSON."}], "model": "gpt-4o", "tool_choice":
|
||||
{"type": "function", "function": {"name": "ScoreOutput"}}, "tools": [{"type":
|
||||
"function", "function": {"name": "ScoreOutput", "description": "Correctly extracted
|
||||
`ScoreOutput` with all the required parameters with correct types", "parameters":
|
||||
{"properties": {"score": {"title": "Score", "type": "integer"}}, "required":
|
||||
["score"], "type": "object"}}}]}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '621'
|
||||
- '519'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=o_N1eUBbqWWHvbE622tXs5BC9yN1X1bxp3npQXI46JU-1715649974-1.0.1.1-ALOCo.oJW08V4.F5WqVArJDbxakTERtLPfwUlDjTYrrg_WiJox.Pw_n.PnDfEdxa52BbaPI8M80h2S3wdJy2sw;
|
||||
_cfuvid=vYif.qp5YND_OTGCG3IgxPXT3sqDTSpeVEIUapP.n.k-1715649974367-0.0.1.1-604800000
|
||||
- __cf_bm=5C3MG9ni0I5bZoHGzfXZq16obGaD1INR3_.wX4CRPAk-1720242582-1.0.1.1-fZiD6L1FdBiC0gqcmBK9_IaHhbHPQi4z04fxYQtoDc9KbYqPvxm_sxP_RkuZX_AyPkHgu85IRq9E6MUAZJGzwQ;
|
||||
_cfuvid=YP7Z3XnHPKQDU2nOhrLzkxr8InOv42HLWchJd1ogneQ-1720242582534-0.0.1.1-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.29.0
|
||||
- OpenAI/Python 1.35.10
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
@@ -533,7 +193,7 @@ interactions:
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.29.0
|
||||
- 1.35.10
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
@@ -543,26 +203,27 @@ interactions:
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
g2QBAMTINfMxamd1CeMeEI6KoYFref5vBi4N2oOiMbVNv3QLhr1AppZoAgF1drbOzezQ65E14dmt
|
||||
ADjNKcFsksRMO1Ud3qXHqqnW3c7ZUq/M23vj7mI9eXr198urN1YEQJvOiiz2xGsuBurUGghycvlZ
|
||||
kVOi2W92e53hsN8X3tE2LxQlOHax2qk2es12r9OJnWZFoMSXAIBtaQTw3fsuJRqVVsQLQ6AEfgTQ
|
||||
W1VQgkkI0xATE1kB5MLWTUqYUinhXLRW/WWJUuRQDm7ruUX6a6LU3/3z09v8MH9W3WF6MLi0g3h7
|
||||
/LJO+pyQembtBoV6YJRiUFmdJIwANIme0FtPmfXFXRldGVUYAE38e0pw+22Ab4bM+uKbEp1vs6fq
|
||||
xr2oYz/0+J5iGZUdO2/TsGA4o9YBfzeEjVGCIVpHY+0F8GPJammcS3Teahf/op0XJlCi2ejpBLhp
|
||||
cghsmHJfPJATmc1mVwySYByBlb/R1IwL7/zUrrLYCwMD
|
||||
H4sIAAAAAAAAA2xS30/bMBB+z19h3XMzhbShJW8wiW1MGogixDRQ5DpOanB8ln1hK1X/d+Q0NKFa
|
||||
HqzTfff90F22EWOgSsgZiDUn0Vgdn63dJl3d3rSv5xuf/b7Di/PLq/JUVYub5wwmgYGrZynog/VF
|
||||
YGO1JIVmDwsnOcmgejJPk3SWZou0AxospQ602lI8wziAcZLFJ9OeuEYlpIec/YkYY2zbvSGiKeU/
|
||||
yFky+eg00nteS8gPQ4yBQx06wL1XnrghmAygQEPShNSm1XoEEKIuBNd6MN5/21E97IlrXdxePlzN
|
||||
/s6/rpZv3x+WF9P7nz++vdz/8iO/vfTGdoGq1ojDfkb4oZ8fmTEGhjcddynQyeuWbEtHdMaAu7pt
|
||||
pKEQHbaP4MPwI+SzHXwa3UX/q5/6andYq8baOlz5oy1BpYzy68JJ7ru04Ant3iLIPXXnaz9dBKzD
|
||||
xlJB+CJNEFz014PhfxnArMcIiesRJ4v6eOA3nmRTVMrU0lmnulNCZYsym59Ok3l1lkC0i94BAAD/
|
||||
/wMAylx2sdMCAAA=
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 88371965caa0a686-MIA
|
||||
- 89ed158dee46a566-MIA
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- br
|
||||
- gzip
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Tue, 14 May 2024 01:26:18 GMT
|
||||
- Sat, 06 Jul 2024 05:09:42 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
@@ -572,25 +233,25 @@ interactions:
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '994'
|
||||
- '144'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
- max-age=31536000; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '300000'
|
||||
- '16000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '299944'
|
||||
- '15999969'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 11ms
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_a9b61926ad5fdaf004dd4b99738bcadb
|
||||
- req_990566332b9b1851c581486c0a4da0e6
|
||||
status:
|
||||
code: 200
|
||||
message: OK
@@ -0,0 +1,146 @@
interactions:
- request:
body: !!binary |
CtoyCiQKIgoMc2VydmljZS5uYW1lEhIKEGNyZXdBSS10ZWxlbWV0cnkSsTIKEgoQY3Jld2FpLnRl
|
||||
bGVtZXRyeRJpChCqSW3lVdefwRBLNXi6Xhm8EgijxkgGOCJOMSoQVG9vbCBVc2FnZSBFcnJvcjAB
|
||||
OQh+giEMv94XQYgthSEMv94XShsKDmNyZXdhaV92ZXJzaW9uEgkKBzAuMzAuMTF6AhgBhQEAAQAA
|
||||
EmkKEITX9xWzBQ0KqeidbMtD1zESCLEDq6L4lZGPKhBUb29sIFVzYWdlIEVycm9yMAE5OF5weAy/
|
||||
3hdBqOZyeAy/3hdKGwoOY3Jld2FpX3ZlcnNpb24SCQoHMC4zMC4xMXoCGAGFAQABAAASgAIKEPfc
|
||||
duYrDUwkgj4RUzAtD2cSCFdOYgVbkCuLKg5UYXNrIEV4ZWN1dGlvbjABOciuqqkLv94XQcA1NNcM
|
||||
v94XSjEKB3Rhc2tfaWQSJgokMzg2ZTBkMWQtMWVjNy00M2QzLTg3MWItNWU3ZjBiZjBkOWVmSi0K
|
||||
FWZvcm1hdHRlZF9kZXNjcmlwdGlvbhIUChJIb3cgbXVjaCBpcyAyICsgMj9KQwoZZm9ybWF0dGVk
|
||||
X2V4cGVjdGVkX291dHB1dBImCiRUaGUgcmVzdWx0IG9mIHRoZSBzdW0gYXMgYW4gaW50ZWdlci5K
|
||||
DQoGb3V0cHV0EgMKATR6AhgBhQEAAQAAErYLChC8sKh5qQC3H99eVsmrP4vCEgizWQpCjdW1WSoM
|
||||
Q3JldyBDcmVhdGVkMAE5eIbY2Qy/3hdBSILa2Qy/3hdKGwoOY3Jld2FpX3ZlcnNpb24SCQoHMC4z
|
||||
MC4xMUoaCg5weXRob25fdmVyc2lvbhIICgYzLjExLjVKMQoHY3Jld19pZBImCiQ1MjA2MDE0Zi0w
|
||||
NDUyLTQzNjQtOWZiMS1lNWM4MzZmNmZiYmJKHAoMY3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxK
|
||||
EQoLY3Jld19tZW1vcnkSAhAAShoKFGNyZXdfbnVtYmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251
|
||||
bWJlcl9vZl9hZ2VudHMSAhgBStIECgtjcmV3X2FnZW50cxLCBAq/BFt7ImlkIjogIjU2OTBjOGJm
|
||||
LWVkMzEtNGU1OC04NzBhLTE2OWM3OTQ1ODdjOSIsICJyb2xlIjogIlJlc2VhcmNoZXIiLCAiZ29h
|
||||
bCI6ICJNYWtlIHRoZSBiZXN0IHJlc2VhcmNoIGFuZCBhbmFseXNpcyBvbiBjb250ZW50IGFib3V0
|
||||
IEFJIGFuZCBBSSBhZ2VudHMiLCAiYmFja3N0b3J5IjogIllvdSdyZSBhbiBleHBlcnQgcmVzZWFy
|
||||
Y2hlciwgc3BlY2lhbGl6ZWQgaW4gdGVjaG5vbG9neSwgc29mdHdhcmUgZW5naW5lZXJpbmcsIEFJ
|
||||
IGFuZCBzdGFydHVwcy4gWW91IHdvcmsgYXMgYSBmcmVlbGFuY2VyIGFuZCBpcyBub3cgd29ya2lu
|
||||
ZyBvbiBkb2luZyByZXNlYXJjaCBhbmQgYW5hbHlzaXMgZm9yIGEgbmV3IGN1c3RvbWVyLiIsICJ2
|
||||
ZXJib3NlPyI6IGZhbHNlLCAibWF4X2l0ZXIiOiAyNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6
|
||||
IG51bGwsICJsbG0iOiAie1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNG9c
|
||||
IiwgXCJ0ZW1wZXJhdHVyZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRl
|
||||
bGVnYXRpb25fZW5hYmxlZD8iOiB0cnVlLCAidG9vbHNfbmFtZXMiOiBbXX1dStACCgpjcmV3X3Rh
|
||||
c2tzEsECCr4CW3siaWQiOiAiZTE5ODM4Y2EtOGNhMi00MzhiLThiNmMtNDFmM2VlYjJmMDA1Iiwg
|
||||
ImRlc2NyaXB0aW9uIjogIkxvb2sgYXQgdGhlIGF2YWlsYWJsZSBkYXRhIG5kIGdpdmUgbWUgYSBz
|
||||
ZW5zZSBvbiB0aGUgdG90YWwgbnVtYmVyIG9mIHNhbGVzLiIsICJleHBlY3RlZF9vdXRwdXQiOiAi
|
||||
VGhlIHRvdGFsIG51bWJlciBvZiBzYWxlcyBhcyBhbiBpbnRlZ2VyIiwgImFzeW5jX2V4ZWN1dGlv
|
||||
bj8iOiBmYWxzZSwgImh1bWFuX2lucHV0PyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJSZXNlYXJj
|
||||
aGVyIiwgImNvbnRleHQiOiBudWxsLCAidG9vbHNfbmFtZXMiOiBbXX1dSioKCHBsYXRmb3JtEh4K
|
||||
HG1hY09TLTE0LjEuMS1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4x
|
||||
LjBKGwoPcGxhdGZvcm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURh
|
||||
cndpbiBLZXJuZWwgVmVyc2lvbiAyMy4xLjA6IE1vbiBPY3QgIDkgMjE6Mjc6MjQgUERUIDIwMjM7
|
||||
IHJvb3Q6eG51LTEwMDAyLjQxLjl+Ni9SRUxFQVNFX0FSTTY0X1Q2MDAwSgoKBGNwdXMSAhgKegIY
|
||||
AYUBAAEAABL/CAoQ96LQBkWdya2FFyVOx27tLRIIXv2S0EgHgE4qDENyZXcgQ3JlYXRlZDABOdDJ
|
||||
bNwMv94XQRiybtwMv94XShsKDmNyZXdhaV92ZXJzaW9uEgkKBzAuMzAuMTFKGgoOcHl0aG9uX3Zl
|
||||
cnNpb24SCAoGMy4xMS41SjEKB2NyZXdfaWQSJgokOGM5OTQzNDMtYzcyYi00MzIwLTlkNTItNzI2
|
||||
MTlkNmNmZTc3ShwKDGNyZXdfcHJvY2VzcxIMCgpzZXF1ZW50aWFsShEKC2NyZXdfbWVtb3J5EgIQ
|
||||
AEoaChRjcmV3X251bWJlcl9vZl90YXNrcxICGAFKGwoVY3Jld19udW1iZXJfb2ZfYWdlbnRzEgIY
|
||||
AUr+AgoLY3Jld19hZ2VudHMS7gIK6wJbeyJpZCI6ICI1YThjOGRjZS00YmQyLTQyMWEtYjUzZC1k
|
||||
ZjE5ODEzMDFiYjEiLCAicm9sZSI6ICJSZXNlYXJjaGVyIiwgImdvYWwiOiAiQmUgc3VwZXIgZW1w
|
||||
YXRoZXRpYy4iLCAiYmFja3N0b3J5IjogIllvdSdyZSBsb3ZlIHRvIHNleSBob3dkeS4iLCAidmVy
|
||||
Ym9zZT8iOiBmYWxzZSwgIm1heF9pdGVyIjogMjUsICJtYXhfcnBtIjogbnVsbCwgImkxOG4iOiBu
|
||||
dWxsLCAibGxtIjogIntcIm5hbWVcIjogbnVsbCwgXCJtb2RlbF9uYW1lXCI6IFwiZ3B0LTRvXCIs
|
||||
IFwidGVtcGVyYXR1cmVcIjogMC43LCBcImNsYXNzXCI6IFwiQ2hhdE9wZW5BSVwifSIsICJkZWxl
|
||||
Z2F0aW9uX2VuYWJsZWQ/IjogZmFsc2UsICJ0b29sc19uYW1lcyI6IFtdfV1K7QEKCmNyZXdfdGFz
|
||||
a3MS3gEK2wFbeyJpZCI6ICJmNWEwMjU3Ni1iNTYxLTRiNzQtOTNhMC0yNmYxMDc2YWI5M2MiLCAi
|
||||
ZGVzY3JpcHRpb24iOiAic2F5IGhvd2R5IiwgImV4cGVjdGVkX291dHB1dCI6ICJIb3dkeSEiLCAi
|
||||
YXN5bmNfZXhlY3V0aW9uPyI6IGZhbHNlLCAiaHVtYW5faW5wdXQ/IjogZmFsc2UsICJhZ2VudF9y
|
||||
b2xlIjogIlJlc2VhcmNoZXIiLCAiY29udGV4dCI6IG51bGwsICJ0b29sc19uYW1lcyI6IFtdfV1K
|
||||
KgoIcGxhdGZvcm0SHgocbWFjT1MtMTQuMS4xLWFybTY0LWFybS02NGJpdEocChBwbGF0Zm9ybV9y
|
||||
ZWxlYXNlEggKBjIzLjEuMEobCg9wbGF0Zm9ybV9zeXN0ZW0SCAoGRGFyd2luSnsKEHBsYXRmb3Jt
|
||||
X3ZlcnNpb24SZwplRGFyd2luIEtlcm5lbCBWZXJzaW9uIDIzLjEuMDogTW9uIE9jdCAgOSAyMToy
|
||||
NzoyNCBQRFQgMjAyMzsgcm9vdDp4bnUtMTAwMDIuNDEuOX42L1JFTEVBU0VfQVJNNjRfVDYwMDBK
|
||||
CgoEY3B1cxICGAp6AhgBhQEAAQAAErgCChCZQqdNoQoZm/bgnzRGX2OfEgguN/7szlsLYioOVGFz
|
||||
ayBFeGVjdXRpb24wATmwOXfcDL/eF0FoPgTeDL/eF0oxCgd0YXNrX2lkEiYKJGY1YTAyNTc2LWI1
|
||||
NjEtNGI3NC05M2EwLTI2ZjEwNzZhYjkzY0okChVmb3JtYXR0ZWRfZGVzY3JpcHRpb24SCwoJc2F5
|
||||
IGhvd2R5SiUKGWZvcm1hdHRlZF9leHBlY3RlZF9vdXRwdXQSCAoGSG93ZHkhSmwKBm91dHB1dBJi
|
||||
CmBIb3dkeSEgSSBob3BlIHRoaXMgbWVzc2FnZSBmaW5kcyB5b3Ugd2VsbCBhbmQgYnJpbmdzIGEg
|
||||
c21pbGUgdG8geW91ciBmYWNlLiBIYXZlIGEgZmFudGFzdGljIGRheSF6AhgBhQEAAQAAEv8IChBG
|
||||
RLGvZYMTRLcpQuhEq3MREggMeXM0BUGtiSoMQ3JldyBDcmVhdGVkMAE5KKNZ4Qy/3hdBWI9b4Qy/
|
||||
3hdKGwoOY3Jld2FpX3ZlcnNpb24SCQoHMC4zMC4xMUoaCg5weXRob25fdmVyc2lvbhIICgYzLjEx
|
||||
LjVKMQoHY3Jld19pZBImCiQxN2Q3YmMzYi04MjE2LTQ4MWMtOGU2YS0zN2U4MDA3M2E1YmJKHAoM
|
||||
Y3Jld19wcm9jZXNzEgwKCnNlcXVlbnRpYWxKEQoLY3Jld19tZW1vcnkSAhAAShoKFGNyZXdfbnVt
|
||||
YmVyX29mX3Rhc2tzEgIYAUobChVjcmV3X251bWJlcl9vZl9hZ2VudHMSAhgBSv4CCgtjcmV3X2Fn
|
||||
ZW50cxLuAgrrAlt7ImlkIjogIjRiYTAzMmMzLWE1NDktNDQyMS05MjY0LTY1ZmVmODZhMTI3MiIs
|
||||
ICJyb2xlIjogIlJlc2VhcmNoZXIiLCAiZ29hbCI6ICJCZSBzdXBlciBlbXBhdGhldGljLiIsICJi
|
||||
YWNrc3RvcnkiOiAiWW91J3JlIGxvdmUgdG8gc2V5IGhvd2R5LiIsICJ2ZXJib3NlPyI6IGZhbHNl
|
||||
LCAibWF4X2l0ZXIiOiAyNSwgIm1heF9ycG0iOiBudWxsLCAiaTE4biI6IG51bGwsICJsbG0iOiAi
|
||||
e1wibmFtZVwiOiBudWxsLCBcIm1vZGVsX25hbWVcIjogXCJncHQtNG9cIiwgXCJ0ZW1wZXJhdHVy
|
||||
ZVwiOiAwLjcsIFwiY2xhc3NcIjogXCJDaGF0T3BlbkFJXCJ9IiwgImRlbGVnYXRpb25fZW5hYmxl
|
||||
ZD8iOiBmYWxzZSwgInRvb2xzX25hbWVzIjogW119XUrtAQoKY3Jld190YXNrcxLeAQrbAVt7Imlk
|
||||
IjogImJlOGZjMWNmLTVjMTYtNDQ4NS04NDZkLTlmNzkxNjcxZmE0NCIsICJkZXNjcmlwdGlvbiI6
|
||||
ICJzYXkgaG93ZHkiLCAiZXhwZWN0ZWRfb3V0cHV0IjogIkhvd2R5ISIsICJhc3luY19leGVjdXRp
|
||||
b24/IjogZmFsc2UsICJodW1hbl9pbnB1dD8iOiBmYWxzZSwgImFnZW50X3JvbGUiOiAiUmVzZWFy
|
||||
Y2hlciIsICJjb250ZXh0IjogbnVsbCwgInRvb2xzX25hbWVzIjogW119XUoqCghwbGF0Zm9ybRIe
|
||||
ChxtYWNPUy0xNC4xLjEtYXJtNjQtYXJtLTY0Yml0ShwKEHBsYXRmb3JtX3JlbGVhc2USCAoGMjMu
|
||||
MS4wShsKD3BsYXRmb3JtX3N5c3RlbRIICgZEYXJ3aW5KewoQcGxhdGZvcm1fdmVyc2lvbhJnCmVE
|
||||
YXJ3aW4gS2VybmVsIFZlcnNpb24gMjMuMS4wOiBNb24gT2N0ICA5IDIxOjI3OjI0IFBEVCAyMDIz
|
||||
OyByb290OnhudS0xMDAwMi40MS45fjYvUkVMRUFTRV9BUk02NF9UNjAwMEoKCgRjcHVzEgIYCnoC
|
||||
GAGFAQABAAAS3gEKEPaxZgbRdGE/aC7TcpTEU3USCO2gTkTtndVwKg5UYXNrIEV4ZWN1dGlvbjAB
|
||||
OSjeZOEMv94XQXgCO+IMv94XSjEKB3Rhc2tfaWQSJgokYmU4ZmMxY2YtNWMxNi00NDg1LTg0NmQt
|
||||
OWY3OTE2NzFmYTQ0SiQKFWZvcm1hdHRlZF9kZXNjcmlwdGlvbhILCglzYXkgaG93ZHlKJQoZZm9y
|
||||
bWF0dGVkX2V4cGVjdGVkX291dHB1dBIICgZIb3dkeSFKEgoGb3V0cHV0EggKBkhvd2R5IXoCGAGF
|
||||
AQABAAAS6AwKEDPpREZrHZXFHl0sOJRtgesSCBN6824xY2RxKgxDcmV3IENyZWF0ZWQwATl40Vfi
|
||||
DL/eF0HQY1niDL/eF0obCg5jcmV3YWlfdmVyc2lvbhIJCgcwLjMwLjExShoKDnB5dGhvbl92ZXJz
|
||||
aW9uEggKBjMuMTEuNUoxCgdjcmV3X2lkEiYKJGU3YjFlNDdjLTM4OTQtNDUyYi1hNzRjLWZiZTU3
|
||||
NDUxOWQxOEocCgxjcmV3X3Byb2Nlc3MSDAoKc2VxdWVudGlhbEoRCgtjcmV3X21lbW9yeRICEABK
|
||||
GgoUY3Jld19udW1iZXJfb2ZfdGFza3MSAhgBShsKFWNyZXdfbnVtYmVyX29mX2FnZW50cxICGAFK
|
||||
0wQKC2NyZXdfYWdlbnRzEsMECsAEW3siaWQiOiAiYzRmZTBkOTYtNzRkMy00MTk4LWI5MDQtYWFi
|
||||
NzBlZGMxYjQ1IiwgInJvbGUiOiAiUmVzZWFyY2hlciIsICJnb2FsIjogIk1ha2UgdGhlIGJlc3Qg
|
||||
cmVzZWFyY2ggYW5kIGFuYWx5c2lzIG9uIGNvbnRlbnQgYWJvdXQgQUkgYW5kIEFJIGFnZW50cyIs
|
||||
ICJiYWNrc3RvcnkiOiAiWW91J3JlIGFuIGV4cGVydCByZXNlYXJjaGVyLCBzcGVjaWFsaXplZCBp
|
||||
biB0ZWNobm9sb2d5LCBzb2Z0d2FyZSBlbmdpbmVlcmluZywgQUkgYW5kIHN0YXJ0dXBzLiBZb3Ug
|
||||
d29yayBhcyBhIGZyZWVsYW5jZXIgYW5kIGlzIG5vdyB3b3JraW5nIG9uIGRvaW5nIHJlc2VhcmNo
|
||||
IGFuZCBhbmFseXNpcyBmb3IgYSBuZXcgY3VzdG9tZXIuIiwgInZlcmJvc2U/IjogZmFsc2UsICJt
|
||||
YXhfaXRlciI6IDI1LCAibWF4X3JwbSI6IG51bGwsICJpMThuIjogbnVsbCwgImxsbSI6ICJ7XCJu
|
||||
YW1lXCI6IG51bGwsIFwibW9kZWxfbmFtZVwiOiBcImdwdC00b1wiLCBcInRlbXBlcmF0dXJlXCI6
|
||||
IDAuNywgXCJjbGFzc1wiOiBcIkNoYXRPcGVuQUlcIn0iLCAiZGVsZWdhdGlvbl9lbmFibGVkPyI6
|
||||
IGZhbHNlLCAidG9vbHNfbmFtZXMiOiBbXX1dSoEECgpjcmV3X3Rhc2tzEvIDCu8DW3siaWQiOiAi
|
||||
YzE2NTkyZjktZDM2Yy00MDFlLWJiMTEtYzdlMGY5ODkxZjA2IiwgImRlc2NyaXB0aW9uIjogIkNv
|
||||
bWUgdXAgd2l0aCBhIGxpc3Qgb2YgNSBpbnRlcmVzdGluZyBpZGVhcyB0byBleHBsb3JlIGZvciBh
|
||||
biBhcnRpY2xlLCB0aGVuIHdyaXRlIG9uZSBhbWF6aW5nIHBhcmFncmFwaCBoaWdobGlnaHQgZm9y
|
||||
IGVhY2ggaWRlYSB0aGF0IHNob3djYXNlcyBob3cgZ29vZCBhbiBhcnRpY2xlIGFib3V0IHRoaXMg
|
||||
dG9waWMgY291bGQgYmUuIFJldHVybiB0aGUgbGlzdCBvZiBpZGVhcyB3aXRoIHRoZWlyIHBhcmFn
|
||||
cmFwaCBhbmQgeW91ciBub3Rlcy4iLCAiZXhwZWN0ZWRfb3V0cHV0IjogIjUgYnVsbGV0IHBvaW50
|
||||
cyB3aXRoIGEgcGFyYWdyYXBoIGZvciBlYWNoIGlkZWEuIiwgImFzeW5jX2V4ZWN1dGlvbj8iOiBm
|
||||
YWxzZSwgImh1bWFuX2lucHV0PyI6IGZhbHNlLCAiYWdlbnRfcm9sZSI6ICJOb25lIiwgImNvbnRl
|
||||
eHQiOiBudWxsLCAidG9vbHNfbmFtZXMiOiBbXX1dSioKCHBsYXRmb3JtEh4KHG1hY09TLTE0LjEu
|
||||
MS1hcm02NC1hcm0tNjRiaXRKHAoQcGxhdGZvcm1fcmVsZWFzZRIICgYyMy4xLjBKGwoPcGxhdGZv
|
||||
cm1fc3lzdGVtEggKBkRhcndpbkp7ChBwbGF0Zm9ybV92ZXJzaW9uEmcKZURhcndpbiBLZXJuZWwg
|
||||
VmVyc2lvbiAyMy4xLjA6IE1vbiBPY3QgIDkgMjE6Mjc6MjQgUERUIDIwMjM7IHJvb3Q6eG51LTEw
|
||||
MDAyLjQxLjl+Ni9SRUxFQVNFX0FSTTY0X1Q2MDAwSgoKBGNwdXMSAhgKegIYAYUBAAEAAA==
|
||||
headers:
|
||||
Accept:
|
||||
- '*/*'
|
||||
Accept-Encoding:
|
||||
- gzip, deflate, br
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Length:
|
||||
- '6493'
|
||||
Content-Type:
|
||||
- application/x-protobuf
|
||||
User-Agent:
|
||||
- OTel-OTLP-Exporter-Python/1.25.0
|
||||
method: POST
|
||||
uri: https://telemetry.crewai.com:4319/v1/traces
|
||||
response:
|
||||
body:
|
||||
string: "\n\0"
|
||||
headers:
|
||||
Content-Length:
|
||||
- '2'
|
||||
Content-Type:
|
||||
- application/x-protobuf
|
||||
Date:
|
||||
- Wed, 03 Jul 2024 15:56:09 GMT
|
||||
status:
|
||||
code: 200
message: OK
version: 1
File diff suppressed because it is too large
@@ -0,0 +1,335 @@
interactions:
- request:
body: '{"messages": [{"content": "You are Friendly Neighbor. You are the friendly
|
||||
neighbor\nYour personal goal is: Make everyone feel welcome\nYou ONLY have access
|
||||
to the following tools, and should NEVER make up tools that are not listed here:\n\nDecide
|
||||
Greetings() -> str - Decide Greetings() - Decide what is the appropriate greeting
|
||||
to use\n\nUse the following format:\n\nThought: you should always think about
|
||||
what to do\nAction: the action to take, only one name of [Decide Greetings],
|
||||
just the name, exactly as it''s written.\nAction Input: the input to the action,
|
||||
just a simple python dictionary, enclosed in curly braces, using \" to wrap
|
||||
keys and values.\nObservation: the result of the action\n\nOnce all necessary
|
||||
information is gathered:\n\nThought: I now know the final answer\nFinal Answer:
|
||||
the final answer to the original input question\n\nCurrent Task: Say an appropriate
|
||||
greeting.\n\nThis is the expect criteria for your final answer: The greeting.
|
||||
\n you MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
|
||||
This is VERY important to you, use the tools available and give your best Final
|
||||
Answer, your job depends on it!\n\nThought:\n", "role": "user"}], "model": "gpt-4o",
|
||||
"n": 1, "stop": ["\nObservation"], "stream": true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1289'
|
||||
content-type:
|
||||
- application/json
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.35.10
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.35.10
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.9
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
need"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
to"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
decide"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
on"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
an"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
appropriate"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
greeting"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
Decide"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
Greetings"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"Action"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
Input"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{"content":"
|
||||
{}\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWRAEA0akLHaVsdYQP1dYZ73QJC","object":"chat.completion.chunk","created":1720137083,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_4008e3b719","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
'
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 89e305e3c8e382f5-GIG
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Type:
|
||||
- text/event-stream; charset=utf-8
|
||||
Date:
|
||||
- Thu, 04 Jul 2024 23:51:24 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=y7BtDW9RWNaYoBExulKsMw50ppqr1itieWbcStDWqVc-1720137084-1.0.1.1-EYCEQ9jOimP45.FgXjdzWftUrV1HHm49W4wbcxFhbrj2DVC1LnMbz9.l.c._AqBRgFAE3xVolosvjmoFDAMPYQ;
|
||||
path=/; expires=Fri, 05-Jul-24 00:21:24 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=pZBoWQ1_gTeUh2oe6ta.S2mxWtdaHvAtn6m2HszLdwk-1720137084219-0.0.1.1-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '335'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=31536000; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '16000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '15999700'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 1ms
|
||||
x-request-id:
|
||||
- req_b3f7e3c47df2641d6bef704ef3ae8a0f
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"messages": [{"content": "You are Friendly Neighbor. You are the friendly
|
||||
neighbor\nYour personal goal is: Make everyone feel welcome\nYou ONLY have access
|
||||
to the following tools, and should NEVER make up tools that are not listed here:\n\nDecide
|
||||
Greetings() -> str - Decide Greetings() - Decide what is the appropriate greeting
|
||||
to use\n\nUse the following format:\n\nThought: you should always think about
|
||||
what to do\nAction: the action to take, only one name of [Decide Greetings],
|
||||
just the name, exactly as it''s written.\nAction Input: the input to the action,
|
||||
just a simple python dictionary, enclosed in curly braces, using \" to wrap
|
||||
keys and values.\nObservation: the result of the action\n\nOnce all necessary
|
||||
information is gathered:\n\nThought: I now know the final answer\nFinal Answer:
|
||||
the final answer to the original input question\n\nCurrent Task: Say an appropriate
|
||||
greeting.\n\nThis is the expect criteria for your final answer: The greeting.
|
||||
\n you MUST return the actual complete content as the final answer, not a summary.\n\nBegin!
|
||||
This is VERY important to you, use the tools available and give your best Final
|
||||
Answer, your job depends on it!\n\nThought:\nI need to decide on an appropriate
|
||||
greeting.\n\nAction: Decide Greetings\nAction Input: {}\n\nObservation: Howdy!\n",
|
||||
"role": "user"}], "model": "gpt-4o", "n": 1, "stop": ["\nObservation"], "stream":
|
||||
true, "temperature": 0.7}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '1404'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=y7BtDW9RWNaYoBExulKsMw50ppqr1itieWbcStDWqVc-1720137084-1.0.1.1-EYCEQ9jOimP45.FgXjdzWftUrV1HHm49W4wbcxFhbrj2DVC1LnMbz9.l.c._AqBRgFAE3xVolosvjmoFDAMPYQ;
|
||||
_cfuvid=pZBoWQ1_gTeUh2oe6ta.S2mxWtdaHvAtn6m2HszLdwk-1720137084219-0.0.1.1-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.35.10
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.35.10
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.9
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/chat/completions
|
||||
response:
|
||||
body:
|
||||
string: 'data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"role":"assistant","content":""},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Thought"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
I"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
now"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
know"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
the"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":".\n\n"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"Final"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
Answer"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":":"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"
|
||||
How"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"dy"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{"content":"!"},"logprobs":null,"finish_reason":null}]}
|
||||
|
||||
|
||||
data: {"id":"chatcmpl-9hQWSD5B35ANI9JLmbxUdPECfNd43","object":"chat.completion.chunk","created":1720137084,"model":"gpt-4o-2024-05-13","system_fingerprint":"fp_ce0793330f","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"stop"}]}
|
||||
|
||||
|
||||
data: [DONE]
|
||||
|
||||
|
||||
'
headers:
CF-Cache-Status:
- DYNAMIC
CF-RAY:
- 89e305ea4abc82f5-GIG
Connection:
- keep-alive
Content-Type:
- text/event-stream; charset=utf-8
Date:
- Thu, 04 Jul 2024 23:51:24 GMT
Server:
- cloudflare
Transfer-Encoding:
- chunked
alt-svc:
- h3=":443"; ma=86400
openai-organization:
- crewai-iuxna1
openai-processing-ms:
- '91'
openai-version:
- '2020-10-01'
strict-transport-security:
- max-age=31536000; includeSubDomains
x-ratelimit-limit-requests:
- '10000'
x-ratelimit-limit-tokens:
- '16000000'
x-ratelimit-remaining-requests:
- '9999'
x-ratelimit-remaining-tokens:
- '15999673'
x-ratelimit-reset-requests:
- 6ms
x-ratelimit-reset-tokens:
- 1ms
x-request-id:
- req_10032db16fa190e8435947a6aaa700ff
status:
code: 200
message: OK
version: 1
@@ -413,42 +413,42 @@ def test_api_calls_throttling(capsys):


@pytest.mark.vcr(filter_headers=["authorization"])
def test_crew_full_ouput():
def test_crew_kickoff_for_each_full_ouput():
    inputs = [
        {"topic": "dog"},
        {"topic": "cat"},
        {"topic": "apple"},
    ]

    agent = Agent(
        role="test role",
        goal="test goal",
        backstory="test backstory",
        allow_delegation=False,
        verbose=True,
        role="{topic} Researcher",
        goal="Express hot takes on {topic}.",
        backstory="You have a lot of experience with {topic}.",
    )

    task1 = Task(
        description="just say hi!",
        expected_output="your greeting",
        agent=agent,
    )
    task2 = Task(
        description="just say hello!",
        expected_output="your greeting",
    task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="1 bullet point about {topic} that's under 15 words.",
        agent=agent,
    )

    crew = Crew(agents=[agent], tasks=[task1, task2], full_output=True)
    crew = Crew(agents=[agent], tasks=[task], full_output=True)
    results = crew.kickoff_for_each(inputs=inputs)

    result = crew.kickoff()
    assert len(results) == len(inputs)
    for result in results:
        assert "usage_metrics" in result
        assert isinstance(result["usage_metrics"], dict)

    expected_usage_metrics = {
        "total_tokens": 348,
        "prompt_tokens": 314,
        "completion_tokens": 34,
        "successful_requests": 2,
    }
    print(result.output)
    assert result.token_usage == expected_usage_metrics
    expected_final_string_output = "Hello!"
    assert result.tasks_output == [task1.output, task2.output]
    assert result.result() == expected_final_string_output
    assert result.raw_output() == expected_final_string_output
        # Assert that all required keys are in usage_metrics and their values are not None
        for key in [
            "total_tokens",
            "prompt_tokens",
            "completion_tokens",
            "successful_requests",
        ]:
            assert key in result["usage_metrics"]
            assert result["usage_metrics"][key] > 0


def test_agents_rpm_is_never_set_if_crew_max_RPM_is_not_set():
@@ -490,7 +490,7 @@ TODO: TEST THE SAME THING BUT WITH HIERARCHICAL PROCESS
@pytest.mark.vcr(filter_headers=["authorization"])
def test_sequential_async_task_execution_completion():
    list_ideas = Task(
        description="Give me a list of 5 interesting ideas to explore for na article, what makes them unique and interesting.",
        description="Give me a list of 5 interesting ideas to explore for an article, what makes them unique and interesting.",
        expected_output="Bullet point list of 5 important events.",
        agent=researcher,
        async_execution=True,
@@ -1128,9 +1128,12 @@ def test_code_execution_flag_adds_code_tool_upon_kickoff():
    )

    crew = Crew(agents=[programmer], tasks=[task])
    crew.kickoff()
    assert len(programmer.tools) == 1
    assert programmer.tools[0].__class__ == CodeInterpreterTool

    with patch.object(Agent, "execute_task") as executor:
        executor.return_value = "ok"
        crew.kickoff()
        assert len(programmer.tools) == 1
        assert programmer.tools[0].__class__ == CodeInterpreterTool


@pytest.mark.vcr(filter_headers=["authorization"])
@@ -1204,6 +1207,29 @@ def test_agent_usage_metrics_are_captured_for_sequential_process():
        assert crew.usage_metrics[key] > 0, f"Value for key '{key}' is zero"


@pytest.mark.vcr(filter_headers=["authorization"])
def test_sequential_crew_creation_tasks_without_agents():
    task = Task(
        description="Come up with a list of 5 interesting ideas to explore for an article, then write one amazing paragraph highlight for each idea that showcases how good an article about this topic could be. Return the list of ideas with their paragraph and your notes.",
        expected_output="5 bullet points with a paragraph for each idea.",
        # agent=researcher, # not having an agent on the task should throw an error
    )

    # Expected Output: The sequential crew should fail to create because the task is missing an agent
    with pytest.raises(pydantic_core._pydantic_core.ValidationError) as exec_info:
        Crew(
            tasks=[task],
            agents=[researcher],
            process=Process.sequential,
        )

    assert exec_info.value.errors()[0]["type"] == "missing_agent_in_task"
    assert (
        "Agent is missing in the task with the following description"
        in exec_info.value.errors()[0]["msg"]
    )


@pytest.mark.vcr(filter_headers=["authorization"])
def test_agent_usage_metrics_are_captured_for_hierarchical_process():
    from langchain_openai import ChatOpenAI
@@ -1227,15 +1253,67 @@ def test_agent_usage_metrics_are_captured_for_hierarchical_process():
    result = crew.kickoff()
    assert result.raw_output() == '"Howdy!"'

    required_keys = [
        "total_tokens",
        "prompt_tokens",
        "completion_tokens",
        "successful_requests",
    ]
    for key in required_keys:
        assert key in crew.usage_metrics, f"Key '{key}' not found in usage_metrics"
        assert crew.usage_metrics[key] > 0, f"Value for key '{key}' is zero"
    assert crew.usage_metrics == {
        "total_tokens": 1927,
        "prompt_tokens": 1557,
        "completion_tokens": 370,
        "successful_requests": 4,
    }


@pytest.mark.vcr(filter_headers=["authorization"])
def test_hierarchical_crew_creation_tasks_with_agents():
    """
    Agents are not required for tasks in a hierarchical process but sometimes they are still added
    This test makes sure that the manager still delegates the task to the agent even if the agent is passed in the task
    """
    from langchain_openai import ChatOpenAI

    task = Task(
        description="Write one amazing paragraph about AI.",
        expected_output="A single paragraph with 4 sentences.",
        agent=writer,
    )

    crew = Crew(
        tasks=[task],
        agents=[writer, researcher],
        process=Process.hierarchical,
        manager_llm=ChatOpenAI(model="gpt-4o"),
    )
    crew.kickoff()
    assert crew.manager_agent is not None
    assert crew.manager_agent.tools is not None
    assert crew.manager_agent.tools[0].description.startswith(
        "Delegate a specific task to one of the following coworkers: [Senior Writer]"
    )


@pytest.mark.vcr(filter_headers=["authorization"])
def test_hierarchical_crew_creation_tasks_without_async_execution():
    from langchain_openai import ChatOpenAI

    task = Task(
        description="Come up with a list of 5 interesting ideas to explore for an article, then write one amazing paragraph highlight for each idea that showcases how good an article about this topic could be. Return the list of ideas with their paragraph and your notes.",
        expected_output="5 bullet points with a paragraph for each idea.",
        async_execution=True, # should throw an error
    )

    with pytest.raises(pydantic_core._pydantic_core.ValidationError) as exec_info:
        Crew(
            tasks=[task],
            agents=[researcher],
            process=Process.hierarchical,
            manager_llm=ChatOpenAI(model="gpt-4o"),
        )

    assert (
        exec_info.value.errors()[0]["type"] == "async_execution_in_hierarchical_process"
    )
    assert (
        "Hierarchical process error: Tasks cannot be flagged with async_execution."
        in exec_info.value.errors()[0]["msg"]
    )


def test_crew_inputs_interpolate_both_agents_and_tasks():
@@ -1248,9 +1326,10 @@ def test_crew_inputs_interpolate_both_agents_and_tasks():
    task = Task(
        description="Give me an analysis around {topic}.",
        expected_output="{points} bullet points about {topic}.",
        agent=agent,
    )

    crew = Crew(agents=[agent], tasks=[task], inputs={"topic": "AI", "points": 5})
    crew = Crew(agents=[agent], tasks=[task])
    inputs = {"topic": "AI", "points": 5}
    crew._interpolate_inputs(inputs=inputs) # Manual call for now

@@ -1643,6 +1722,7 @@ def test_crew_train_success(task_evaluator, crew_training_handler, kickoff):
    task = Task(
        description="Come up with a list of 5 interesting ideas to explore for an article, then write one amazing paragraph highlight for each idea that showcases how good an article about this topic could be. Return the list of ideas with their paragraph and your notes.",
        expected_output="5 bullet points with a paragraph for each idea.",
        agent=researcher,
    )

    crew = Crew(
@@ -1697,6 +1777,7 @@ def test_crew_train_error():
    task = Task(
        description="Come up with a list of 5 interesting ideas to explore for an article",
        expected_output="5 bullet points with a paragraph for each idea.",
        agent=researcher,
    )

    crew = Crew(
@@ -1718,6 +1799,7 @@ def test__setup_for_training():
    task = Task(
        description="Come up with a list of 5 interesting ideas to explore for an article",
        expected_output="5 bullet points with a paragraph for each idea.",
        agent=researcher,
    )

    crew = Crew(

@@ -8,7 +8,7 @@ interactions:
accept:
- application/json
accept-encoding:
- gzip, deflate, br
- gzip, deflate
connection:
- keep-alive
content-length:
@@ -18,7 +18,7 @@ interactions:
host:
- api.openai.com
user-agent:
- OpenAI/Python 1.25.1
- OpenAI/Python 1.35.10
x-stainless-arch:
- arm64
x-stainless-async:
@@ -28,7 +28,7 @@ interactions:
x-stainless-os:
- MacOS
x-stainless-package-version:
- 1.25.1
- 1.35.10
x-stainless-runtime:
- CPython
x-stainless-runtime-version:
@@ -38,137 +38,135 @@ interactions:
response:
body:
string: !!binary |
YXyDACB/th//38/PZLRrT7ldIgJZnBmfn4CJAiq+LyCiIKJAAqSrdXLgkEHeFhU8w8wDSxSD5oc6
|
||||
WflNKJzM7Iut//7nz5+/fdFU5fT33z9/369x+vuff/78+XvPp/zvv3/+1z9//vz5899R6Vh1RXW/
|
||||
vz51iwDwor7+/fePmAWQRvTvn787bSEMK+stofY5z+A4vzmmFLXFYr0KgCU+lpRH0gctwh5hOJ+6
|
||||
HYvObV7Q6fuREQhFifllnkK6uJ9cVo5fC0uvRuPrc9leAFNbInanzsWsykWOWutesQOUh2G7vg4V
|
||||
aklssHCBapjux2yGNjkR9sju72au1duMYlZlhDC8onGvhisqFPvOyLUmDReMY4rq92QzXXxci/Hy
|
||||
3ACMpx9hnvS7J9PozDV8TqZJDt/hhvjz3VIwPq5EtVyshsZS31jTLmZINwyvfClP+xGqIULkUC44
|
||||
3JXfWABr6TbEWycvmYd1e4SyrlsSbOqlWbUqi+G8JR0LHts45GFaC9rvt7HxHO9wM27epxi66e2z
|
||||
Qyae+DyzDSD2KUqs3ZdPM3/TAqCXnRML9nbV8CvcbWAs6/Gqf3XEydJncChMkdLa/hbTN9uPcPVP
|
||||
PYWZdAOvpdRE8tbwyP449M0aGNOsnqfCwLNc8mGKyliFm1CMeHOuTT7Lez4i6j8JifLnbNFQBxFa
|
||||
Sb4ST2vNZneqNy/4WUHNdKKGfFWepYnGbSzgXeS8i1lRogsIX0/ENDrKRVc4L139jFKEBe8SFfzh
|
||||
hTk62UVAokflIL6ZawGEcywRwkNize/r7QjcFnTiFt93yJStH6AsaVz6zi5BSPfm8ahF6BYz++TG
|
||||
zRJOAVZ1ndyp0Nhs4Dg5vKD+0h/dST8tWUZHrpUbFlaC80BrpgZGF0bl2BLsXaJieQoOwCS+TOYI
|
||||
yC/WOEp1sKblQ6wTvxZDfDRWIMolJf7XPTbcUcIUaZUuMOdzEDhVljgHT+IPKqWjOXAafWtQ0XjC
|
||||
y8M+D1x5GzP8YuGEP3sfis9l2mO4Hu2OmXFdhiwVniVkYR2zw3VXoPV0sAE+TjER6/us+frR0pe6
|
||||
aEjAqUdm9NwooQQPNbkS53T7FGtSfEv4vVaTHYi2b7az1frwEXc6VoIFEI3HR4TkxXiywAiihB+f
|
||||
L12rzygh1kEqwmW2Rh/t8OVMTBoaXDppbg15QHaMXGrJWmb+HuHxKjmLTh87YfNgSptbsInwZgnN
|
||||
ZPldsxk1yQ5IwL/Ap6ZkKbwvz5ocOroLl/JkjNDYeksVdGisZS/8KvDCNGS4NZxmdMsiB+FQnoi7
|
||||
5ErIxUz5Ie+3SYnf7p8hPx+0DK3vS8UcXNrNgsAIwIl+Bzo/h85iG6Oa0XnfWfRjYYkvZuK+4Gxw
|
||||
l37D5ZJQXE0RXAwzIHkhn9FSHR8YlHEo6SAqXrPml1OqhdHTYcQbq2G+j10E+6U6YunRFeGq+7cU
|
||||
DOtOial/dcT9C1rh1Jojc8fc4cu+mijSbWLRb/ZWEdv2h1ohyhmxSIxenJ8nNQBrmnXi+tuJz/rs
|
||||
mzDN48CM3NugT1bUAsSszFhFji1nZKlzhPPkQdzvvbH444M7FLMyY54X8nB6IxPDfqmOLNjbVbN4
|
||||
5CYCH7szMbP8g1ZL2pTIOm8sEmi1jfiCvF7dNiePeU8FELuO2RFMPIWUXg8qYoJxvCD7acZ4paHB
|
||||
Fx/Xsvb8hi0xnW4upneo/8B6ryIxf9hpVsOrc6jCzw1r5YItLiw3Fe1NU6M/YkxouKk8AkNVD7gW
|
||||
buUwkdy3gbGsx7Kq0oQ/nQ9A6auYGfOWoVn+CioIt7hk1pG34eLUqqugxs/Y+XT7JNS1Bxte/uLS
|
||||
l9PNxdg7bo1KuFDii9rDWvS2T2H4RT6x9ZgmPd7hUomT75YdtPJjTeOli6F7t2cSYCMtFtbeRBCc
|
||||
h8NC4LU1ObXqgjhaBK+yaQ2/lrxneNP0zgqBj4ieg32ELuPPwfPySoefKS06RJ82ZcfxLaC53+57
|
||||
9MDbH/NeipRQq9tTuA7qjOFZsHBtybQiigSfufPVtNhm7gV4pe8T89/7NJmVmxFo/WxXzFi234LL
|
||||
b5VCZWVXRnCjW2v601LE+1Vgrr898MVePhJAxVVm6t8azcvDxMiPiwIzwaEhO+lwkYe8EAj+CX3I
|
||||
1fYaA1adI4ueBQmXG78dIbjbIZbryOYNLWkKrRdviVldj4j6iavDtXU4iR7Vo5m1jgcogCll9rB7
|
||||
cLqLdy8Yp/LGrLFqhvlGuhkSqTawqAx3viDY+yh/ixG5pOOrWc5uUqLddi6JIbO+mBOr+qFr5t0Y
|
||||
mXBnfVS7dOEpa2cs2iRoZq3jPkzteGLVJbCQJFlND1plClSOz5a13Pgp1iS1FvBuntJkKRZNRK9j
|
||||
a1PBuGkhsxXxCGH0dIj1+hTD9PPyDFzeYNxeLD/Zpp6dwQNvf8wOoymcRL3OtL6OJ6IXiz6suxWO
|
||||
YE38QyLYy5xPWmrClckXUj6qTbOyzMAgby0Py8ZTbpYuuAQgveqQOPsH5qO5KWQIWnkmTvDoi0WJ
|
||||
5Bqy4fxjpL5eOf8eLxGyD7c9cQTUJ3yguxfck43Nwq3ytOZvm6dwe31exCeZnmzX2LOhcIYWP2vB
|
||||
SL728sxAu1grZrJjJLP5yKiqRweJzpH6HlbRTjuob6qCN9/Tl3PByFLIiotFnDJxE340phixpKuI
|
||||
4SW0GSfjJMGrvZl0Te/DMCdNkkGtvQyiN0PMZ5PEKcpz02CH68602p3aYZh/W2DeOnnJcnaLChXf
|
||||
40gCM3AQb/nxCGb1AmLvqNgse9raYAbTm/jpebZmvV5bYE1GiJHZrFinL1ORdRcI3Wntq/kZn6GD
|
||||
G21TEjlTyNfp+y1BRUzG0u8uFau8himS1ueK47gurXl+cB9u8RoQcwwpp8W2aSEwTj0xYLSH3dY3
|
||||
UzVGccNI3Nz4Ui62rO00Toi3/f2sNiN1CnllYcpwU4dj+npVEDUdZVH2tIq1mZMKOGQuHS7YaBbW
|
||||
nkT1eNjHLLgiZRgOlj4i4X40mdEKKFxFO23BUxeH2Dc9tHhKBxfpHzEkx0C7WOwykR7GU0+Ybuu8
|
||||
mbaPtdfGb8XxdgiexTLziYKLXjnxtPbVLMrU/qC4VQjXbe9z6aj5LsiL9STeQ2F8GqTGBLaJboQI
|
||||
Drbmzn0C7LF1YX6UJZy+lihDy8sRsbDKdTPHojlC1LSUWemsJzPXmheyvmvOMDm2aF6FskT5W4qI
|
||||
kXsPxMlS5+BaqsOC52oM2+eUYXilnxMzs/zDx+B3yMGetz8WOVOIVt2/XZDUsQ+L1IgNc/8OYyCH
|
||||
MWOGyrbh+g0VF0kXquP1ej9xHopjgJbgXtFnElgJl7yHhPrswegybxmaoCgq2L1hYDOYHrXgXDGR
|
||||
vFhPLI7iFg0kTlsoI6Vg+21yHfiBm0dQsp4Q97Baw2RwwHD9KAFxtoYV8v7p1CCcY4kFnndOpvLp
|
||||
1pDGYYsbfcNDdtYkgC9PGZZu39iav2kBMM3jQIzguCTLt9dtZAxuOanP1WjmX/ORwDneI+LlUBdj
|
||||
rZ5WhD7hm0Swl/k0vdEIIYIMC6tcD7xbpxm1j6RnlrnZ8GVZxRkMSf3geq+9OF9vrw6sadVZ+DVj
|
||||
vqQfrwVBVDPmHU2jmbnWvFBgJD1dLp4biuUhEZCm/FQs6WibrKKdduqxnQ2il3Qu+IyMCNZ3WpFD
|
||||
rEYF319MEYRDeWIPYzyhnp4uPRyhfTM7cR/JYvLwiDotU9jhukNoWV5CjlaHAlXy94TWA7ldAKrW
|
||||
Jnt9t2vW7KO0iJHFJda7bYe5EHQd0jyYGX54SbJUx0eEcH56MMd+3cLF2yQ5oE/4xuO1nRLu9qkO
|
||||
SWinVK7IueC4CWXwVxKR/Wl4hXx/CUT0iAeTeYG/RVP3y2Vkds+ZBPftu5jtifeArumBXZVPV8yN
|
||||
L85gCn5F1X6sm3W/nkQonG9LPBNbw9ZsbBXIYcyIJRVRQ32L/ND4vh+Yeb474Zb8IhUKzbbIrStf
|
||||
w/i+PkbQKl3At3CRCqoseQ5PJ+3YfritIVe2ug/X1uP4+lQAzai/m7BufcKCC+qKydsUOWwiCliB
|
||||
g9Rweau4IE/4hXda+xpmdzzNIBvthzhbownZdIhL7eRnMyG40S3JPseZhuPjyCJ7voSS8Wk6iHa3
|
||||
kfLr7DZr5omidqvWhoTHU1ysWpXFwDjWib2P6mT1eTAjPTVFZm2LxJqR18twhPbNDIUVw2Ivzxxy
|
||||
82CzZK2OCXu1fIaD1M/MV1QRLaes/SEtIB9m7puxmU+pXMHVjgvaqJnPx7W6xUD9JyHkkfcWy9Ug
|
||||
huVlH1hYhgHnI9V9zRBQThf/aA/8sbMBpNcrxODNZbEan6FF+OXeMS2CnbXodtNpk2Mz5rivyJrC
|
||||
g1PCoslXZipO3iy/XSFAYVQ+00s6F1z6lB3a33ce8eg8WHwNzoEq1qZEkZsfm+maSjl008efttdb
|
||||
GnId/B78oJUYHoSiWDV8k6H4HkeqfMot70n38JHbTHfM1cznq+FREWqlLth5/6B8UUG14ej4DxZg
|
||||
I02WzCY2IrdxZWaP12bsuslHrfWo2J5dcDF5jwxDYVQ+3Q28T+YgJyYAnhNyqJanNV3e04iuHxQQ
|
||||
Szi/rWl5CTkc5y4i++oVhPScYxM867Khw/WUDUtTfi7IFPyKGOI6WuM+91tIzUvEoiW+D+tt51RQ
|
||||
Zqyjl0dXWOPwmit4PoOFhXe/bli126owKnFLDlrphSts7Bxuv5ZSITk8rd6ibavWyqsgh1gdi/Vl
|
||||
BJK6284lu5bKd6B92ecQJbPG9HiHG8aQ6sO3oDfiy6oxjFJ472A89YQFV6QMS1TGqlbqyMQMN7rF
|
||||
Kw9SdYcvZxIp0SXkzt6YNV6uLcHKqiT0bm9ewAh36dCbt2QhqhXDgLuIBJnMhne8mysNPa4627uL
|
||||
ZUnfcLHh0v8eJDp+fnwR9giDWOsSnqRXH87P769E8hPvqVYuNJwu515Vf+q6pZtrTYb5Kx4pZMP1
|
||||
R2GJ78MySIMJhiR/sCaU9bAaXiehWqsNdpaMS/Gbvh8ZMU4+zPAS2lDmm5L6yHvM/MG1GlG9kRYN
|
||||
guWSUL9/Bg4rn8FXzT3zrWfYjNlcVqi7zQFzFCEe1t8gxvDIhy1GVXgdFqJaMRiPk8wOT9qgVUxz
|
||||
AS6GGRDd1nnDzTDtIM2DGb+u9xNafrtCQEGvXibZeMrDIlePCOH89KCyU6V8qeneh/tnLVgwjnmx
|
||||
NCW7qD/Pd8nlsGoJrdJIULfW901+224saILGH3rkPSZ5aJ8aVl3VUe7e7ZmYP/xIRkHyW0g8lxLM
|
||||
0LVZo2MQo8PDzLF072jRN764okAcn0Qv6Vzwx3W/grjb5izqzzRcdlU2q/Eu1uj2c6j49KCrC2X7
|
||||
CrAcFMEwn9K5AppddSoMAkq4lb90oGjDmL/q5rAGS6+i0ZFsEtKNGYrn5/KCMkIF8X4c8ynQlBqc
|
||||
/r5hWFmV5Ge6moruuy5izrFchjV3cwAeAcf8/ous/l4+cyTFNMcqas7JuNLKheJ0Z3SJDjXn71cP
|
||||
UPoqJqRLPWt8DHaq4Tx5MDf/DgM7a3INaIU9w+5l5f1MYAXsVk8SzcQddv6Fr6qaeXsWLtCiqSl7
|
||||
Gaxp+eDNtSbNlNmeC9GuGPE7vR3Q6iWSAJ2WK4w0VzsRsRfJwKx5wknX7vha2oqLklhWCKmvVzTu
|
||||
AxNrsfXDzPq0S7EGSy3DZj6PzNTMC+L7iymqjW8tzM7009B/xUeASl/FzNkIj6Kr2jAFxsmHmOl9
|
||||
aHgqPCvY2GhDY8lJrdn/CSk6KyTCryZ1m12shzmypuXD7GHnhi3XhhfaDnGC07tWo3X32sro/C0K
|
||||
5uwMxn9S00lw3rcW3qzhdpiewgFQ6cuYldd2Sn5+E2XI9b0n2VuiXkzvwilR8U5cLBrxB434Y/TI
|
||||
Cy8hwxMK+G+RfjUc5w9nOA+0gR7IdQXhnnyJZcKr4E1rSqCjYU9n6REU8yP0WtQZmwNzX01r9f5P
|
||||
uKDZ0wXmsdluuP3VZGSfI49Z1c8Z5huhM3pX9cj8dv8MZ+0izoDC2mQ3dGjChRuPFbHhZjLnfYgt
|
||||
iTtOrQy/yGelNZvDTh/No/Z7rSZeVLYNuQ56rxZG6ZNLtP7CWQrPHVjuPmZ+FkeF/MqQj+LrUBC3
|
||||
radiNI5hjIwiHpgu3EwuFduhQ7/XarL9cikSpraPI7KULmSm5IghxdWE1euOIkzty4NToikmwC8n
|
||||
RLf02qK7eFdDNLyfxIBRKCgWvgCu7zzxzwwctFxbcQXjcZKZHUYHa/44ioniXaxhyJ73cPZ/QooO
|
||||
FtlhKR3NhvuSXEO9rbeEdKlnzV/xESDNjEQszdeXxbe9U4N1gy8p4pNTzIl1xNB+3I4FP7ZYq/3O
|
||||
ZlDEOGS6peuWqGYZhsA49cSXtDhcNje/h8+ZVOxw2fkN18fgCE5/3zBDWjNEqaylYCSnBwnZ5lyw
|
||||
RT3G8KiGAsMUW5xrL7tElvJZKRL4iLgq3nv4xcKJeZGzCcfyHl9U3LkWVZ/bhzVrrpCD8HVEclAe
|
||||
AZrPJx7AoZM2BBs3q+CXycBIqkaO5/79ttatWPmonuiR+HVkI857WYfbr6VUtp5Ds9J+rtH3SjkW
|
||||
RYqT3eO6XwEJ8GIEnCJcDkOfwlmLVkI+aV7w+pKJsDm+UgxjHBTzFvY/ABXrFH2c77Aqp3OPxoO0
|
||||
I1YkeZy/+WWEQrHvpHDzueF3uc+Ayy1j8fn+tkZ6qnoYgq4j3skU+fdBVxtwcKfM/OIo3PqP6qdK
|
||||
Mc2JzyylWVqEAqiKImA6UQe+I94jNP96l0Xafi3E1g8lAJd7xKmPZrM9fgsBlW2zEIfbfKCakJSa
|
||||
aGcqiabY4ltxfgpw7BdMgkxmw+IcOUWRBysJoA6LFW22F6DZWcfNUl3QrLlCBtvkcaFLbQzDuot9
|
||||
X7WUNmTpsNug1XQ1GZ2i7MJcWz41c+d+BcRt0Nn5cxDQJB+eFagNSklIUnOQ2k0vI7V0GZ3V0h1E
|
||||
4hYCeGEakgKn35AbRQ7o6aQdKbV9XKybSKiQPe9+FNzJKXYMqT6iO+FFN/5XHMbUfFMYVKtmh/eg
|
||||
h7MgeSaIvW4yO9pb1pxXgJGvmnuihxprFmeWSxid7UCwdZvDRlsDCfRI15mro3PCd13zA8N6UPoI
|
||||
OgtJi/R7ASINIdbr8+RMiLGNlKwnGILfc+AJS3WId7HGQoGPfAnvoQRib5qMOF9sLZb5wKiJjCs7
|
||||
xGpUcDBXDAP+cIZbw2nW6TbY0KflC8ujlRSz4Dqlqm5YxcxjTYrVcUodTq05EgtlZiGS7hqA5KqE
|
||||
CqtcN9ztUxPeQoCJdeQaZ/DSckDWS2JeAcmw1ds61Q4/6U2VYCnReHnqrXLNnBsdi+e+meP0IKh6
|
||||
I76ZwbeSNaVelKvbawkEh0k/rLeP36PutArM+AifgeHEqeEU5RfiyZ+I01F9YCRmB5no9RA161sQ
|
||||
ctTPbsX8Il6HeVqKDNQdHejWe70bNmDxAvVJmdhhr3vW8rkNHdzA/6J14ruEDxguKACWYqV4e8Uo
|
||||
jmKkGap6ILphzWjEO93Upr17Zu6jM5LZws9cu61FwgLXe4UsrUYKvd9fiRf4Zz5n9cUFULFOxatl
|
||||
oKk4rT9w0Sun6pftms5LJAF0NOyJ3+0PfNYuMKO3SV9UOAdBIgVLraLutArEK5XvMIbfsoQ4+W6J
|
||||
McmHcGlgdFXUBBnR6XtBK9EUE4rvcWQEOZ9m3Ea+Deza2cQwLo9kST+kU5K1LrAQH7xhceu9D4ak
|
||||
frB4sfxkHX9xrn0MfCJ6/3astX20PqDk8mG4M+JipnqtIkNAOYUxDgppvmZHNKz6xMK7Xzc8xTRF
|
||||
y8/T6cMbhWGSD98SZmIGeLvaeUifyzYFUb+llIfKwhmJyxa1H7ujAlqjZJXN/QrN7m4z3NismV5T
|
||||
9VOjcN0T77z5DbM++6b6EjHHQu7poYSrKUKHwhSZ9fo8+XQ516omvN0PMWc/DCd7eeZQnB6MEMWX
|
||||
C97yYwx+F60kkLrHsO7Xk6SdSuwRktbfhL6p0qIN+ISKytCGS3FaeySZ8pe5S34LF5wrOuqWrCae
|
||||
9LMaujezo5bH5pbY6rAr1iHELSLbCyUGk/WQqW0mIjN+NYQcm4O1nPwuBi53jDjkUQzvXt5KKBre
|
||||
TxY1hRlK2ZVHcHjtbtNG8k98xp/9D2bPFAgBp7C45N1y6LaZS3I2unw+npmE6hOacCZqG2tO49FV
|
||||
5SfeM3Kuy2Fuer1Dfp59iMvyKNwuLykH8n01dNT2a8H2EtZRkisjfd8TpZmtqsNqobkWCaeN3+xc
|
||||
Oc61OvB8Ru75NmTONwK1bJuFWfomCUcx2pXw958/f/78+Z9//vz5P//558+fv11/r95///3zd6qW
|
||||
6b/sAum/8nv+X6IoWRE8HfO6+vuvC2H09zv03Xf6f1PfVp/x779/dvCfYpv6KX/L/vPnz//88z//
|
||||
GAM=
|
||||
H4sIAAAAAAAAA1SXya6zypKF5/cpfp0pJdnGhgzOjM6AAWdiwF2NAGMMGGy6JMn78ld7n1KVasIg
|
||||
WUpC0Xyx+Pe//vz565NWeTb+9fefv97lMP71Xz9nj2RM/vr7z3//68+fP3/+/fv8f8q8SfPHo2yL
|
||||
X/nvy7J95Oyvv/+s//fk/0R///lr/XoH9NAniUEfqIshdtmIO7GvU16cEcBHybJpkwZtv1gmwoDU
|
||||
cPur92iMr2vUZ/0DC59+9Eb9oIQ7Xd7rWMArhc+Pep/AbipEYqcwe7NK0hx9XelB3Yd67EWIjyVy
|
||||
NVmnmvTJvakVbgNU5zOhJ1F6RwzdpRmdZvdOyIov/TRAPyFrKXLqPFJSsUM4x6hr9ibVWX71Jtun
|
||||
AIueEIqN5mGMz6taANknOiHD/Y5muTUnUEp9O0mOvUmLer3BCotFf1qjeOEz9qUBrpqDyF59YG8z
|
||||
q5UOXBNWRD+bdsXWwyaAuLnVxLm6rJqPn10IHW8banrP0OPZtRCU9WibWLhqOKKbULtAo4wHeqj1
|
||||
E1/aIVBhe92meDmbbcUvGAHEu/uJertPXjGkjCbUOGvxzklUxJfDLoP8Jc9T216+3lAm0gBy//1O
|
||||
si006Zyppo427tcmbnn4VItMBVWuN4aGxfnDPZpeQxlcg494px90PhOdr9EF3piYp3k2JtD9NQzv
|
||||
9ZVoh7NerfOElDBN5ZNaaePxOZQ8AeF3pGLUFG9vfg3DBab1aovpcJdQPbzYIA+ni49Z+/TThV29
|
||||
BMmIukQdE8vjzqEQoA1DkWh1S6JF+hwCaO8XlRjFukbjtPu4yDcqdfqk3gHRdF8EyihJIXWv2zDi
|
||||
0bq7yNtNmk/yyqP9Eu+OOVhb/TuxhSjRUt6vppSkwkLMaKVE9L73XThqTk3IfvJTZqQbgD1ONOqW
|
||||
B6f/yZcKxubQEk9Lz2l3n9gC30sZE+2iBNVP/DeULb5A99uvwOkDlRf4fp7FpOSDns7nx7eAazYE
|
||||
eH7fzj1DdzbDK5pO+LUQJX1fygOGTnTeVMXvFA0aeRVg+kFItecjRUvXxQCqzwfiOEnBmfbmohw7
|
||||
Ox2/lUz33t6ARCjc6krcLW89br3vGcSOrFMj7Q/V9r43HSi0SsNyICjedNJogPROe1G/FXyD9w9d
|
||||
VbR2FZHjK009ft/7Dorqy4Vo3U3j4mQ29W/9KEaZaPC2OQ/wJNlC9d3N5NNzDMWV7xgehuNaj5iY
|
||||
3gaUhTYQe4uBD1m1iqE0jgXZR3jrzeKHDSBYdTUpzb4ylkCSc3j7pkf1ZrIMSgYvgY02hERlkuTN
|
||||
z/pbouewisnRlgvE9uoxQ9ZS5tQVezNixVfDsA/l4wTJ+c2p6Qgz0odOm4qduzUW69OU0DPiTm3u
|
||||
nzlVbqMPq9fikqS4XTx2vlIMRtFnU/kzr7z3tFjZS/f973z1/LS2fIBDQrDym5+jeAiA2KeR2JWg
|
||||
9oto9AvYdTNQa7nvq2VjPCbUSFiYhqBCHm3ihyodr29EjX5fVgvMiQtfZKqE3IrxZ75vOnxp0VFf
|
||||
T7X+s7kwHe7n253GbKj5sEVqjqIbfRJvYJXB9XBqUH7b3amjFdwbnbAMgWzz4B8ecM2V1mCnzZlY
|
||||
xtD2vKKrDBF9axA9ArPnp5v9kWMuWT+8U7zBeO0CuHz2xvQVuYyo2swJkvc4xOyn/ov/LHaKYbc1
|
||||
MWV/9ui1VxvQHX1NDsFhz/mPHljDb3jnfrHBlbUkovqaC1PxvY+os0zuQ9G4GDeJknn0Tm4mnFnR
|
||||
YWH1mKIlTVqAEcuY+vOTosU7YxkqScioPqxqj8n3fpL0zedGky1vDdrGvQnGPChTUTkUTavsckNI
|
||||
ukzEV7VnxNBmF0O4BA5R18tUdY9BGKTJvq+pS66tMazlSwinsL4QFemx9xPvGoTY3lNrOBTGWLHE
|
||||
gnS2CRafrel91fd7Bvk+PGjGrwMarO7goE+dWxjB94K+TGUm9LsgpuFeExCXhe8HRY9jRw+5JFY/
|
||||
/J1A2rgMw9ei3nw7jAt6prpDNcb0ajD5TgCwNgHV50sc8SVlrhJ5QU7dvfVNuRm5E9za9YX68V6N
|
||||
FvmhxOgZCQL1xt2xmoNdK4J0r2RqV0LRz/6nxGiUUIhpK00pvSpaKB20XiDaWvukfG23FxA+74B6
|
||||
N0z+p5+70cFYijSdJ1c1v8FZ1DeEPD5BP16/G4DkfufEnNxnxbyEu+hmjzFVramIBs+3S/jtRyvA
|
||||
Vc9vF3GG9h6rWCLPR8X0+OAj8Rb75BmoZcVWayNGWT1kRM8en/SffvZl+06dV9bypi19C76VcsaS
|
||||
p7oVO7+4A69oONEHyw20PZVRDa/NJEyr590w+FHUQgW/TypefvOnqeMOXbqPPq2CXEmHY17HIBXv
|
||||
PXF2Y5ZSduhu8G5tgt/N14lEItU3+K2XVz4HNKLNHCuXpzUQw5HVfhGaLICQ9i2xxN2uYmZm6tCW
|
||||
8ZVcibyqmCBoGLZDamNuBrtqZqblgm2bLjEEAVfUtxFAYzszscj+k7Jv7hRwuBw7qsP2yhcNLAfd
|
||||
jOOBWNvXJ2JTaDfwLluTavnqZSz9rYvBflQvovauGoliezVBaZ4v/NJA411+OGX/7OsqjrRoVtqP
|
||||
KD/lhzhxm71TxpN1A+1K3OGNlH75vDF3MXSfziAO7qxocWQlRF6j50Tfd1M1kZYt0F6YNknCpk/5
|
||||
oBkZ/PgtYnRZyHmyD330ux/NNpmqqtatEHr/DfTX7/AffiNyWA/EMU77fr7SOQBjnhTi5fm64mNk
|
||||
OhDq+zcxvvFszKezXgMcboQQHlJvri0qo2508bR9WVXV22nawGYVxMQsQ4/zYLhn0FFDwlxUxHSm
|
||||
4AVoh5QtPrv7zPjpTwcUVXcJLhNqTEoc1ZA13w/x6cpMt21cmfK+lyuqKuGds1Mf7xQ62YQc63iu
|
||||
3o9ijiFfpaepy1nhDeWwJPDDF6q/GyOd40eUQ8/nwzRGkVaxcRea8qLcQ7pv1F3fdetih+7+TafH
|
||||
V4q8xf6sawjDg0VUY+PyeesjF5Vd7JFQ+14iKq3JB0a8wxSLMY/GVal/lB++4o1ovFIurx8TrD9d
|
||||
QjTGymqeC3OC1dEFPH7ODl/n6seFXbl6keMNUT7dskqHxdolZP+RsLE0gwaAhehCXf0Y8WnEQ4YU
|
||||
TdrgbSUU1ZyH4Qw0qCfqw0uNuJ1GDYKjnFBLr2vE3WrIEBcbnxzG7onmn30ExxPeU+++0nqxKz8Y
|
||||
jpV3onZQt3xgyzEBMxw7qn8ED/3wJUTOrm+p17a0n53CC2HtmVdqddeNx5AtWagyEgMryDpxFtwz
|
||||
jH79avlujGje5M8JqfKVTiu5p2jI6zQH0Ql66vz63Sb/COh3Hub3bdN/v9G6hrW4T6k/5Nee0akM
|
||||
wOUQEPtlGf2YFBkGc2EeUYfQSOciOdcwnBfxJ56zMf36HWPjtbj58cODEF1VGIT1iNmXhgbvMgS/
|
||||
3ydHPjKD1axwEDaCaFQQ1Soe46sI+knzyF7HRT/9+EV03bdvYvzwYbpN/fC73/Gm64r+H171L+9L
|
||||
XaFc8SVuzBneS/fGDTuVnPPVUsKQiSol12tYLXph1+BDd6Pm29MidhCr7je/EzuLlrctzlxAeZMg
|
||||
jLR0EzG8MSeZBbFGLF7MHutmzYfgW+SETK6fLs8xXMMI2Yle72bk9d3U1LDNd29qvMJnxfUJ+YiJ
|
||||
a+nHjyLE212e/N4/LeU4Ir66Hy7gPAaTuOG0jWbv8i3Q8pFsoiaful8YD1RobHf+6e8o4uz0dFAy
|
||||
Kk+qMunuLexqJL98xe0qGCNWMlOF86eIJvm2OqeLfUE7KCzDJ+ZQl96i3ZI1Yg+uUac3N2gonA4Q
|
||||
qTcz0UTpnc7XqPoAue+O9DzITcoG15zhoQfPie30oppt0NYgqa+aOEww+q2zigXw4XMje3fy+HSZ
|
||||
SIOo+jrSw+jvvY1dZzKM7s0gQSGV/bDJnwNI13rCwczEdBi4nIB1yH7/5xZv6crCh1Nclfh8zgHx
|
||||
Jn7osOg3Qq3zpUn/8bdWmCtYjEex4tpOsmBNmhIzxsp+uU9sBqf3G2KxU4lG11kyJVOdmRyOkmps
|
||||
UVDelF2n9tRz1hdvS7dRAy47z9N/AAAA//9Mml3Purq2xe/Xp1j535oVeZOWfaeAyJuUBxA1OTkB
|
||||
RBRQpNAWmuzvvoPPzsm5NSaCnZ1j/Maca6O0am45pqAZrlsjk4PI463fRfAWpVt0bLQqnk+vTADq
|
||||
kwh0J9oRn4jbKRBGlxd1Vn6Op+f9J4OdeTLppRHCmAywHuDD6BjV41gA7G4nKljuBzX3aKg5DJUS
|
||||
WuYqJx2aHYP8+E4EK94i5DjKm5Oz26eQS41PrTF064kola1B75wTLcxNPFd1AuGlTr0AZLcin+/r
|
||||
vAJ68aqCpp9lY76v46e2yk1CncfsGyM67AsoDcKZBr2axcyevRWM5sim37xgziX4AqKLD8hcn7Ax
|
||||
l+t9qd41XSRyfQo5OW/TDA67CI9h9Eq8KVhfOgi2oUyNScjzyR8dBb5BMhLp3PK629+pDXDxuQWr
|
||||
zdHmbMsDBj27yOmJRYTP3jGz4dsJ79TcBcnCl3cTnFV/psfSmGvqiaMNxlV+W/QywES3ugB6o34g
|
||||
innt6il63HX4SasEeUCv+AjRKICFV5AxHNuarIsyg+9R8pBlCi4eV/uVDhVu6KR9bS94wrmcgUvA
|
||||
SmRsJMxJvrOb7/fpvpJveBqLfQkzaS2R4g7zmnY6K+F8VxkNRLGKqda3KkwVt0H7WLYA3xZN+e13
|
||||
RN2RiuPjVehU+0XyhY+HfDIU96VmOizoteg/OXn+KBmU3wqk29pGnMyfzIbdXb2ibXDYeYO+v71+
|
||||
6x8t/M/bMlK1d8p3AaafrcELUITql1fcL4913sS08pY1X/4zqKyty9/3pY/t1WAbw0ghyCUfOU+V
|
||||
gGbRI23hW4oWPyih02TCD9bvyOtYz+fmCgKovv11QNOw8xbeLEBTPu3f/jWsbxtdXfSWaBeAwDQk
|
||||
IYFi532IvHrf8CxOWIdq6XeBcJ0rzIdGmsE+uOzoRTzF4LMOL9vf+2BHOuFDj54rtXrZAXVxZNQi
|
||||
iVADShVYaDd0b8y7V80gv+oORa/Jqym0hicIhsSlvviOMDPOTfo9r4DdD2fMKDQiWHW7DT1OZQ2Y
|
||||
kPYrqK4iB+mNxuvJXwlPeCUZDeraiLxJQ0AFM5LRKJxsBc8NoiE4qbc70WCQcJYpjg1Db5VTO3tm
|
||||
3qzcaKRiy7fQeeF1yvKbrn792ehnQ06oCXtQH5QAXa11FFPxoF0UBliCvrxJj43dQCPoCdreD+d6
|
||||
2oh9BJbzD2RhTXG/8YQeKG1VIXt7Yfk0R5sZdrdTRg3ejmDxq4KqOj0kovwp+TgqkQVFdXaCDS9c
|
||||
zJpxW8KlXohcFKDma6xv4V6T6Zef8NwLnQo2sWsi++7rnkx2P084J+cc6UcDxYO2WfIX6KyoKcdq
|
||||
3Y/zKAF9co/UbJ4TZumuh7BuLyyA/Oxz3Kg/JajU/hrM5XCqB7cLLLhVT5RI4FnVnLUdhIs/RXZq
|
||||
HIzh3DaJ5vzQO/XQiPPx6+9FlTnUsn1u9O7dn2Egpg/kJZKFxVnirnoxkEMtvG0A0S+d8s2Xglm8
|
||||
o5gE3tmCl5DioF/yzvleWyoEmrShi3+NBZ4WCrTTTg2SI5X5XNcbCwiXZoN8Uz9j2vhRoP2A/kgN
|
||||
aE2Yae9KgEM54oVHU8wX/VC713mi+lsM8efT0gCUZhZQI4kfeSuOXvL1P8h9/+B47sKfEpoHdiB0
|
||||
NybGxJoyATGidkAXvZEl2ytBRD9v6vfyDrfNKX+CsxrMwYVJFeC8bhWwGg85DUSf1R/5YKmwNi5G
|
||||
sE7uIh6NXIOg8y4BjX8yzD+3dLgAvTceyC2nrTdko+iDR4GtABj+Gyx+ioHbS/KoJXy8GN8ntYJh
|
||||
lHN6WL01PN7UM4Fr6n2QL4XPfPbukQTXa+SSaZ+4OZc1uQB0Wx8pmr2m/rCmTME9hiu6w5VZzw0e
|
||||
BeCN2wM1k88eT0dh1YG9Lgx06c9gukBzgAtf0ttrX3u//vSnaQ2KWBYZ8m7bKpvsXNk0AZqOxfb9
|
||||
DLWrn+mBUmPRYyevEtTjih3QtTN6j/n+6QW//LDkcR6ILWyCzeado2POB0DeoZeBLUoxtZc8VuLH
|
||||
/AW2mbr76nU9sGmdgHT38qgX7zkY202hqkv+FLy1XWWMi7+Gm3eP0C+vl9mhgtrr9kC7TbfySNE6
|
||||
EEr7cQhGfdqDxX/O8Gd8KNRf7gNXh24FXl4Pgo1p3bx5yQeAvr+LwTrc6vUvz8ZJKKKD4x0MNgLq
|
||||
gmxTiQG/Dw/OibyvoKdWH1QOwz6flHXlwg3rX9R/ipPB9/GFwe95LudtSCvWBXCpJ2RteZSzV33p
|
||||
4JLnUnsQ7W9/CeGDO2tqVo9rTh8bLYE3NN6RT9MED6LJUmjpeREAxTX4lO6FBiz1Qb7zhhnDWwev
|
||||
qzL85rVg3IjPSKWDaxCVO3djeb8MLnyEArdyAatX3IJiTdZo0RePTfAnBY+Hz4PN6LcGf3orE5wL
|
||||
EiLkUBNPix7AoAg/hC/1PE2EJYBhwgO1uQax2HmbGQ598qTfvHW6VUoCP1bB0eJncnbyOgFe3CgO
|
||||
fuv1xT49fEmfNeE5/mDGu1MDUJDKi786cJbsLAGKqX9DP+srq/n2pVygWyuUXpfnoz0hDbSc8oW8
|
||||
KZSMz7WPTPhWJkLtUPU9McSBpG4GN0OeNm3qX71kAnWXPB/zeelPoK6gRa0WzblwdYAE2cE7ILSc
|
||||
v0RnoIKlnyC3/3AwwE1caMfMVtHR0Q0uFe1uBffhKUDOXqaYWYDPIMvhjJz06uVcPIgZPMqDFpDy
|
||||
moJvfcFPvT4RWLQYz7rV+eo3H04HZ+vJ99OogOpophS1u5946nVnBZ4nYUvv82edk0T/KWH4vCfI
|
||||
jB86ll9lB8Hid4g2vywsG1uwgl/+PhH947Ee9RDsTvCF0qXefvlmKCkmk7TZ53K4ymww9OmTqMeX
|
||||
gMnxfeph06yrL6+BMXqcdejXnU7d+0bn7KMVLmBy5CB37dGaEcEu4PM8YoTa3ZQ/T69MgguPUWsH
|
||||
TgbDj7qHbVTX5N7PBpD9Wn1C2TiHyD8Kz3iYzMAGK2yjQIvjB57fT9OER5lo1NnSYZkHAOk371/m
|
||||
Twa7jtQF8TSd6XeewS05cuHHAAJFWrOP+Tc/bU/mM4CXJs4nuk2gaq3XJfW3PfJYsfW3cK9LA0JK
|
||||
oWNhBG8X1gc1IMoyf2DqydR/9c9Bd60e4tsx+85rqHVuYyy+3yzRFn4l4CHdPJofA6gQrFWkOt2d
|
||||
mlVDGalzfmmpcbDFegR4yNSX1wHkuW2HubC1O1DMwYo6c/IG1Mr3FayOVoocoTnWw/5OXXB4PRSk
|
||||
C2e/nhd//DsPCAiYMb9y7wL1hzUQ9tq1MS1AEkHgHAf67U+zluQvOLbXA7WvmRxz5hQpiE2UBLOD
|
||||
LTAqYuJrvuIe0aJXmAA11DVHS0/UqfCuni6Xn0xTIxxTr7088yH2fAJvOEmRQZtTzW6SZUE+Wzui
|
||||
xfEOL3lOD406yInaVgp/09xawZdGHbT7EY+cqW9/AIs/+OpTLF2Lyfrmu8gWgk9Ol/wF6vubiPa3
|
||||
6phPdxVIKqjSC3IFdQLzPthY0O/MgS7+siZAvWwhEFQTBY/pHvOuuEsbeFSyYDL9A2bh5NhwPKtt
|
||||
MAk7O2ZLvqUt/RA5Jtsbv/lcdijf9HDEUc4uu0kH9VXOCCsUN1/6sw+q1BypXpVVPZtzmYDlvpFQ
|
||||
cFaAdOG1gIeD5QYwrHNvuDX7CxTheCLsIs/GmO5hA87t8Ca84H796wfHg2h+86F6PAqrXv3eH8fb
|
||||
9niKr0qqfqySB5vjZutJYzXa4OA9BeocuqcxKj+Vqp2M1RsdbnsvJ1+/MsCYIHR9KzlbeyyFp0sx
|
||||
oyXPwAv/S9op0W0UFO6nJqvk04Fl3kSm6tB4k5jrHTBP4Yeix/bqMZMr8L/+dVoZNW39LtS++fjB
|
||||
fso5ay+rDkzzMn9I9tt84FUngI1h1Qht6qPB/DKNYNKrFPnL/9H8KKIKPt2toi6Eei5lEfehR7SY
|
||||
+Lv5h/Po+enhMj9B3gFnnE3wmsLxDS2UVvRgsFChErgC8BNcTte1wc7tkKqYzw5F+32BGevDHpB9
|
||||
0SHvbPmeHPdpBp0kKslTfc/eKKw0CM77tiWfYrepeVq9Xl/9W+bBdi1868GxqU1dSRfwuOJa+OsX
|
||||
PWbHOYll+QL/fLcC/v3X33//z3fD4NXdynZZDBjLafzn/1YF/slu2T+CIP2uIZAhq8o///rvBsKf
|
||||
D+5en/F/x64p38Off/0tw99dgz9jN2bt///8r+Wn/v3XfwAAAP//AwAoZH8C4CAAAA==
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 88142860ce7a82e2-GIG
|
||||
- 89de8402cc8ddab1-MIA
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- br
|
||||
- gzip
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 09 May 2024 19:39:49 GMT
|
||||
- Thu, 04 Jul 2024 10:43:40 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Set-Cookie:
|
||||
- __cf_bm=JUH9MsoOBdCquA9K9qOgVbrLUkBpuF4d2k7EvjnBOLg-1715283589-1.0.1.1-96.yHqUmYzRiWC7xZ4.TPKu5RqvB1EFecasHfb_ix62oXcsw_Sp.gz0gDk4Inl119IxRMINM613CI5HheoJM2A;
|
||||
path=/; expires=Thu, 09-May-24 20:09:49 GMT; domain=.api.openai.com; HttpOnly;
|
||||
- __cf_bm=4yTFGytKZgiKE08enzMel3PhLQLu9mwQY7gk_lH43Bc-1720089820-1.0.1.1-4V06JbYY1zuYkbV7SSudBUPyqHqm6yFze0ufE2rHtJTtOmU.XLh_k1M9h.a.oVJGC44GhO25bv5s5224Ic.p6w;
|
||||
path=/; expires=Thu, 04-Jul-24 11:13:40 GMT; domain=.api.openai.com; HttpOnly;
|
||||
Secure; SameSite=None
|
||||
- _cfuvid=DPNTnqcoTitPoe12n1vvVltuYsIzavX_dftsSEhQQcc-1715283589489-0.0.1.1-604800000;
|
||||
- _cfuvid=04ERWHryLOXBKqyH.MpOILpw8OtW9Tchzbutsc9OJnE-1720089820726-0.0.1.1-604800000;
|
||||
path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
@@ -181,210 +179,207 @@ interactions:
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '15'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '10000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '9999946'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_ace5babe6674a08d07fce90792eef7eb
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"input": ["test value"], "model": "text-embedding-ada-002", "encoding_format":
|
||||
"base64"}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate, br
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '89'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=JUH9MsoOBdCquA9K9qOgVbrLUkBpuF4d2k7EvjnBOLg-1715283589-1.0.1.1-96.yHqUmYzRiWC7xZ4.TPKu5RqvB1EFecasHfb_ix62oXcsw_Sp.gz0gDk4Inl119IxRMINM613CI5HheoJM2A;
|
||||
_cfuvid=DPNTnqcoTitPoe12n1vvVltuYsIzavX_dftsSEhQQcc-1715283589489-0.0.1.1-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.25.1
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.25.1
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/embeddings
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
YXSDACDztvb89z8/k9FeZ0rVEhBIuDPopTFBQMT3fiOISifSJEBqlQdcfLgwYYhq0ycWOOTwIO1h
|
||||
81r/88/f379D0VTl8u9//v7t6nn597/++fv79/lYHv/+5+///PP39/f3P6StVV9Uz2f9fVcg2EV9
|
||||
/vufPz418QbBf/7+TVtk0UC95xN7n/0a8PWNEcnICNjKbJqhe2gQ9Wr4Ayx1fzE8z0dC8Rr2gMLH
|
||||
FcIu2zgCDvBbsI/U1ODVDA+MzOeXsVewcrAIswt25vo6rW2Gcinhp4bEvzc/bfr7kYK+dDWqR3IE
|
||||
1ruc89CtpJLeh3xPyOkbRWA+zQG+nYE5bZoXxepzeYTUFCS7GW6dZQGJPC8UO20HmGwYqRrLTUm4
|
||||
GHsNIec4h4P6OWLz6zRgsz8XAqtlMKjzDUtzAy1XQ8s0eBpeP0mzTtkvh0WYXtB+v61gu3O5AXdN
|
||||
geiYPBW2mMc0hxUqFMInt7rZXHnJlCi57dQR2GnalFpHaujICvaAzwB15s8IKa0stGW+ybZ8NCP4
|
||||
jBUJh8Jgsz3J5xUON0cnHPKbhF0KjVc/3sEna/oixfrVn7ESzsmLLJzTsf1sYAsG0VGjDrPbYuOF
|
||||
HUJdH97YnC53ky2XkwHw8/bAzkKMYjGP6QNIZjTgsD7ObAHnOoIWi0aMlcMWzHeITqCSzwM6gsZK
|
||||
xO4k1aA69SLVFtk2Se33FdwyG2Ebb12wnTvQw68RACTd42ewNt/VgHSkF3QctpptEbgoaq9/ZxyK
|
||||
ij6x2y458KRMV7ROwZtNwqjx8O09ABHn27vY5W/cwvvX3vEJPeaEukYcqkpr+NQR2NCs1rqV0MPk
|
||||
Q8+HXJ+O4Wr5sOJtmcjnOjEFv4Uh5Osbw37H6Q0rBc+AXHb9Yf/0aqfV3gMNZlS5U60d4oAJRj+D
|
||||
Iswu+OxtXrP/hEiB3XZJkPQ9SAnzaLjCFPEVxl48BuQwoxGey+ZK1I4bwaYfSQsX7FbYFl6C+ZPP
|
||||
fAmokl6x6yaoICD+VHC5HyiRw+hWEF4eWmDV3AWpx/Y1sSVRU/CuShdXjVAl+/KRU2C3WCCAfDOw
|
||||
b60fQ2UMvhivsQBWiVkzJPLqUP325dnqbhIPL+qsYe0pfKepFHRN8bVpobrvOIl4zeNWjZsd4EDs
|
||||
STDnzZCB72U/IPZyw2Y9em4Jiuf1QA0sGia/k2iA3zAVsdO4Avjxws4dmpHMNORL2Vw8OfGhey9T
|
||||
XFEZMLb99FIt9iIkR2s9TNvunXJ472eRCPWwmJsS5gSO58Wn1ls9NsQdeANOpnHGVmb3gDn6aQAH
|
||||
5/vCxrhHCbVmxsNXkun0nP6UZMkPbIczjxHZVEubWIM8BXagQkTUnE+zVNm9htETpeh1O9gFO5nT
|
||||
CrOHcMZ5o+zBEiddDwE9dsgf0TNg/t2w1Iq3ZaofpGRiTf3mofvqNuy6CSr2w8HjFaqkV6wfydec
|
||||
b/JkwDHvikXAWxcs83Z3oHf1RyRdzxn40mQ6Af0CZarxvdYI1R2J8HiOI+zi2G6ONz98gNq2L9Q3
|
||||
vSn4HZEzQrNxb9Qev21CoenNgCivAhvbK2CTcP0psLbtC1Ln/h1QbRwVKDXIJoxFa7L7XJyqbJ54
|
||||
bFyfB7Dp70cGeD2zsdv01UTUhBlw/3wINctWZcNGYQ7D7GRQ2/g8APtIzRvaSeiio6T0DTkfLrOa
|
||||
WvEXIz+KE7ZcTgYIt2VAe734wTSCzwijJ0rRVsyFuR0/ugj9ydOpcb9FbLZ3U4Pn+XQnLZfZ0xJE
|
||||
1whGquBgfzgCkxbhM4U2E2WqvZsJrHS5xPAOdoPanECKfQoLTbGG/E7Tcq+TldyGFX5eWkaYXb+n
|
||||
fT63LcTG00diGBFzDYUiBV9HIeTNnSuTnq2Ig3GzA+rEXBz0+f3OQaFcW2yG1yAgfN/l8PmFFq1k
|
||||
z0m2lY0G/O2igmBu1dP65G4WrG33Qi23UgoyN1cLEARzxBVdDbZzB1ooMvdK9ZQ7BtsROQPw/PiO
|
||||
pC7oklVklQJDR1aoVXtWcRwjbYSTaZypN8RNsaltm4KYizE9a1c9YcJBzuCUV1eq94nbrMfSGME7
|
||||
USIidJGRLI/Dp4em6Sn0fFeWZL7LkQhFt43pK/UVQL+tvMP4/j2QJ+fYbMXN660cE23HdtX0YE2l
|
||||
aQQLvz2RUA9LsB9+90xxToqN9ubQgSn/oBKO8X7D3vtemEzTkhI0Fhdgb/l1RXf9BRI8XS8rdR6q
|
||||
l+xqAgxoLlGMhK+sBZsbDi28Xj2MA9uCydKS0wpd1Js4FAabHYVZPEHffrwxcoIwILz87pXp3Fr0
|
||||
Bog8/dLxO0It4D5IPpetyeahjABo2zuNr88D2GMDxsr1EXEkRdq72Xl4VUCD3waR19tvmiezR8Ac
|
||||
mheSNmwA5uinAW7vwwuRB3qzOYuOKcjFl4ZWb9/Z6vrZDLcAqeTI2nOwKk9fg/ztVWPnkB6DqVNg
|
||||
CdfumGPtKXyb9d7KCsxE84j9q9Ka223YCGgEOcOBnO7JmiHfgOHU+tgE2G32j1dKsIC9iy1AomL1
|
||||
he4NR+as6GiiwSRhIs2gOB5TtIm3zmSvz6+C909M6Pld182+fOQUys2a4kc720xcpV8IldbwEddQ
|
||||
Wiy7p+UQS96JekPcJO1OohHMjSKQY1HuATM0PYbTU3mS9QnEhjXSJQUogy6+/PSJjQYwNOhw3AG7
|
||||
tXdu2HLRDCjk5kbPl1UDAvXmFZ4EfEarNDlsTyxbhMLiqNT0FgH88tGMoXMPK7TPZjPNSQ18wER+
|
||||
xeGnuwTb9WdKsDqTDntfPTIZXO4nIJnRgANLdoMt+e4plJKDSqQN14BF+9OB71+aEw75ZsFu++qo
|
||||
1fBO6Olsjsm6W14IN3sVsKWU6cSm1K3hKW0LUqPX21xfQ5rDJvpsVNu4IKEZn/DwJTs11p5AnPa5
|
||||
uTrQX0NEz6N5MRfxJUVQtS0b3881M8mrrGMl82ueiKfsaRLrC0e4+iOhlngjCbsyPwL6fPRQa41u
|
||||
sQ4G4UHwriyirGuTLFV2eUP2Pc9oAsdPsOpFcQKS585IUNaTuZvqIQUV78r4XA5bw85WBOF5vxyp
|
||||
Zn1ebHfUB4RbZiOMnGAOFv7tV5BYjw8OtmthUik/GyCqORG9h9o0j44pcJDzdxOHsVwlM7+CHWRj
|
||||
ecHa75Ca7H28Q9D1hUF4N7GDdbjWBvTH1cauGNbJHma9Bl3Um1TjZ9osz5saAxe1Jk3J+QM2X2kJ
|
||||
iCXRxZ68GEDs3tYOP/wu0aBRQLG2P6+Hj0vdUof/AkbKrXjAblVu1BK0udjPVz8GnGNGNHiGx2Iv
|
||||
t+IBC9i61ECjUGwKRgZ87vyVbNq6mFtphKt8IQ4kt1JSGmrpDxHkb/LCliXAqRjBb4T+uNsE9qIV
|
||||
7IfkLcI2O1N8YtK7mXwHSQAdexl9T60+rXKxlrAWbi5aoCIHs8JXMQRppOFI2gTATvc0gkJubgRM
|
||||
8t7Qm2/l8O44FpEavglW9RRJ8LnkIbnqlgeW2pAhfOhhQB1mW8l+mNGoSEmnox72zNzEY1hDfw0R
|
||||
NvmPOW305Gnw2/MBtt19AUsELpLq3suUOlq4TjPy5RlG8fikesiPzf57vXPgot5EEt5IwgKr8RUt
|
||||
43ckcWNdzJ+rq8EwOxnUbQov2X7PrIbsYmFyYJoM9mMU1Sqk5IUOoYaSFTevN1AEUyIM4O/ETvcy
|
||||
gt85TqhnNwXYm4qFsA9fkPodp0/bsKkl7ETDoUZpC+bWl7cTEOLuR13ldpw2/SM68Pt5mPS8Tk4h
|
||||
7vK+w/skdmhSJj5h+ySusICti9MDy6YV+dsMy9u+USeWTLMLlccbtFeeUcceJECCw6QA+JiSZfbi
|
||||
N1tDoUhBQoCOxPM3MsmzhCVY5DrCzn1apvG+lhrIroZHA+0gBtMzNE/QTdMOqSxtg309ayGUmtCm
|
||||
l58+AUrHQw7Dj5bQgDdfCYm0+wlUA8DYZL5eiBe9JiDshRs9/aJpWnhB4cC77TrqpZhMmyR/Rzjl
|
||||
1RWbiDuBvTzRGT6l+Ek2J7wmmxX8ELwKzowEtQqLfa1XArP1SwmXwy7YXHHVwO0xu0TCGyrmmzwZ
|
||||
MJIMA7uGAZK9/2QZvKizRrgqcYOtuiMehJlvYMzJtTm5Rhyqr9+zwucmcpojdqAFKxUWSEKPsBC8
|
||||
s+LDNrhW1OAE26Tv4wWqVm5p+Ho5LxNbLpoBUQZdfKmRnmwz7+TghWKeyMxeJgKgJEGgLCLVLtsY
|
||||
zOaTi8D3sh7o6+SoybJ6jx7kVxFRv7bWaX18vwiiYy9Ta48Hc/1loIe3B3Gxl2LSEKK8fDg3ioA9
|
||||
yxkBtcith62wR0joF5ZQJ9tO6vnys+m540awvG6TpXBraVDTrt/TjkZlhViNjtQMC8XczioWoRCW
|
||||
Aba4rGtovPsSKFU/I4CUn2bOomMGf7cxoo6h8NNmfy4zvEfcFetirSTduQO9onhvgTDmf4r5ezvv
|
||||
wLiNR+o2PTdtfH1YQb8dazSrSxisXVoPwFuvKfracE6YM39GcJsWgQYvQTB38NVy9WBLPq2swyvZ
|
||||
uvuHB9S3XhiFkjuxqGxbqMX2gXox64NdiMYZ1q/DhxqlfQ22o1Q5sMs2DgmlXSX7Ra8JxJJ3or4k
|
||||
/KZZYtYMH1ulYzfWPEZO3ygGgVZ/se7o72CT5O8IPKXG5MDJddDtnpZDq4YXqlmfAyO/sAuhfmMq
|
||||
DfrhMFGO8jHk6xtDwg1s0+8Cp0q5+j+FOp/RAhsLBAJ/7Cxib8jjZFV2pYbiTZlI0QVkYrH3QvBl
|
||||
EhXB8+0bbJy0OfBDl4k65X1j++F3z2DjM0w9PXeaNQNohYJrmNijmpuMfKeP6sKzJ+Kt0S343dKR
|
||||
ahzblBwr65tsu6flEPDSjiR3PwZ7ZKISvvgbRTdfj5s90u4+3AH5kd15nxgZRDhIRza1OBQVvdmN
|
||||
AYlw2y4S4dyamNtUvFu4WM0XcZuPTcZZSQwFd8nJfNK6gJFDloI+WL/Y6WgQsIWzdsh1cKGGURjT
|
||||
b3+XA+hEWlCzU27B3glqDN9V6aLve5+mPb9fIBCNk08rdM0n5pDzA6hX9qBWUt/A4jX1AE6twqPd
|
||||
3K1GSGKdh3b1TLBh9EcwM/uxQovFI7VUXwKzZt0dMJ4Xnwbcqkz7tXAVeA/eIdZecT5tQXSNQUJq
|
||||
mzoskifyDZIReBZ/wd7kiMl6l3MRrJ2QU1ucdLAVkzGCkzf72Hqrx+YbrtYJ1p5oIQYekM3olQ+w
|
||||
THSOOvfpPO1VkRMAfNvCWll10zK6hQjIQW7w9zD40yCuvx5+9arH+OSoxcJJsgOO6tvBwQF+E7Iq
|
||||
IQRh5hv4fOogY9+L2AJ/CnQEjEwy55WMPLiD3cCm1HtgO350EcqDZFJ/3sJg0z+cBWB3Gqivq1VC
|
||||
63DNoPQzPRxK2RjM1iqX4E32N9WaG8fWi3h6QwrMkXpfPQrmgPdWOJ17C5/Tn1Ks1NRFaOcDxgjs
|
||||
VrB2txeExfN2QEpKjgn5sDaDckVdMi2+Dfbw9wmhQ4Yen7WrngyMjgjOv0xFs51Ttp+vfgw5DYVE
|
||||
UcKpYdD0ZgioH2HTOWzTb/XFVIWRW1KL5EeTqL+pVsB4f1JHb/aGSBwRYZmYHHbvBz0QnjchAqdW
|
||||
4ak5XeRgSeA9AvAR3ait44O5/JhrAOPYpkjZz3wyyNWyw0ueFdT0ZaM5VudyhqfD44TPl1VjQqY5
|
||||
BkjulYxmYbDBT6l1pF5UolEDhm4h1n5fgWu7JwThj52Q9/5YwQtFPPVZmxVs7GQfVsnLpbolsKSv
|
||||
9hnCLkA52vYnbzLrFqTyhfke2TLfBJvduhqkvHJG+5ocTBYQpVW6AOWInfcyYNDqSuWu3y5oc/M6
|
||||
WH8Z6BWusEp6EetHQd77Y4fBu7LQMbMjc/7q1xgogi5RhGeloTNkCNajsOMzv87T7O8SAqy8Fvhk
|
||||
Liv7PbWfAWbUYOoOm8F+7xwOwM5HTB3N0aetNjYOikvGsKfnzsQ27boq9/L8odbDXwLmGnEI7o5l
|
||||
0Zgcs2kKrOYEHxk3U4fZbbEgMFuSk74mImrOpyHgq+UqUKhIPuKtC1ZWfjWlrCeDGl1Bit35jhWU
|
||||
GHgQ+dO5TBCCqQbW832i1+Wtgr2qkAb2goVkvcJDwaRFfSvGPJv4OffvYP0MPoL5xS6odioiIDms
|
||||
bGEfPiH2BHYBu9AXIXjo4E0OTmsz3n65IizUpCSq9fg29IOPPCyN9EetpL6BPbHOoiJVvoSAWbeM
|
||||
PWVXAr/zCSABb53J+mjRIE25FbvHX1LM5yCQwGavAhGMYgL04vwUADBi1E0JMren9tPA7N3fpE9x
|
||||
V+wGMDR4pamPul9xbJZ8DCLgg9cD7ZzQmWuKUgRd1JtISsS0WbH4mKF5TgVclikXEMgZFbxd1ZSe
|
||||
7rFqrrnCt9B/f33EOuVmbgm8x9Bf5QY75X0DdD1YJ+C/O59qidkx1nTZCdzBblDrVp7A+vh+Q3gu
|
||||
eA2jUHKnzaqFCpo5dpAk33Eyj9Mpg4aqldQWXkKwtsnFgt9PbtLX0m4NXX/FCFbNOVK/uPQmW3/F
|
||||
CD8vI8N6Lp2CjVQBgU8pfuKwF61gfz+ZCPrS1UijmArb5ZiNYGmfFsYn51kMKLyWMABZSlRIG7ad
|
||||
VVcEj63UCQBH3eRprmQwOncR3fDOpeSQpRC0/R2fNlyD5XYV3xB/RYoO9TEL2KcGBgQKFYmcSXOx
|
||||
QdmMYGRkFtasbGx2PnZKCNJIw8nvoCabEkYzbKJmo9rxwab5dFIg3KNawxbJj+ZSU26GW0diio2O
|
||||
TNs1bTmo3AWE2lehmccX6mN46GtIrfY4NOswizEAvLQT9nLnSewENYbbdpHwWbvqCV8nTQQ8u+nx
|
||||
qa+/CVOWJlWH+IbIgZMNU7jptx6iD59Tz6HDxPr+WkOJc67kfRWjYjBP9g6j5LpT3xP7aXcuG1Qf
|
||||
u5eR9QoPBYO39wAprSx61ve9IEJUE6XVti91GldgzJzqCNz164VqVuY3K+RDB67+SLBxjcdpS75K
|
||||
Cpa7SmlojMHEb/ngw7U75lg3qy0gl0k5wdUfCbbVai6W57hW8PY+duTdDnvAGuQpoPEfCXZgnwTr
|
||||
R1UIhMJLp76uckmrBg4CE+l2bMcjNy2Lx3Hg/okJ9kekmvQVrBzwHu1ODj4FwfyGHx9+9bKnji8H
|
||||
jOW8PsDfYuUY4Y9dHE8LQHBGfoJPDW8GZD7PvdJ0vYq4ojPAnlg2D50fYNQs2yfbwVd7qOTRcPT8
|
||||
ro2JTGFhgK/hAWo3BxosF9hUUC/rlXox600m6gYHUAZd6m1kmn7guM3wG2Yi1Q8Sa1prlUsoPinF
|
||||
56vgBnw7cQqE9jnAJ3F7mCsSdB+q17xCnNIfiwk70IFDCWxqSr0Hjqn7i+BYmQ3hcmgHLN4NBaCZ
|
||||
DBiFXNas4vrrYZp2GmKHdAHtaygf8KY1IWK/g1qwpn7zAEZuSZEThAFjWtDCWri62JMlydxTY7SA
|
||||
Z3967K4AF8yaGS/zYXeh5sbx04Yu6QD3oGTULy69uaqBg+CC7YoiwXqbzMlkH0buHtKgHw4NSyu1
|
||||
Bvcsf1L99k3BJsl0gIfpYOHw3stg44Wdg+87Tal7P3zM+cm9HGgP3ECOvr43lPwWER704Eaa/HVr
|
||||
1sN7fis+sCq0TlkWLGmSPGAjgIwo8xYGpPd9Bz6/0MIes8/NMm8XC9YHhUPNFPgmL9haD07XZMVB
|
||||
lbfmfGnsFHTa8MFh6m8TvflhDub0smHzXRGwV0VEYCndG+pNTpasNFdS+EoyHWtT8GZL8qscwF3H
|
||||
FKksbYOfgpEGbbLNaG3CX0NP9zRWtUs+Yudl4Gb1X6oGnst2ok6bewHb1HcORLeNkVhBbTryU9vC
|
||||
63qTieRfHZO1cq1A8GpCtPlOn0xpqYXwZ3QW1o8yTAhrJAt+DQ/g0y8Kms2Rkh2Wg+sTbiL91E9h
|
||||
YcATPsU0zWwHrKIrSPAOdgMjPCsTsaLhAWaJ03EQfKdgU8J8Bt/Pw6TOQuqEdMEiQmvI79gDVsto
|
||||
uq0ERO4a4udZpICcD/cZHkB7IjC3jGbVrIujzr9MxdrhPjJyGu0W2AMcyFsf+oKpZCRQtS2bYgKS
|
||||
aRsePwR+7CxirfwpgB5I/lbRsZfxySmuAVEToIEyVN6I659JszqsbMFzmO7YPH97MMjbm1PhashY
|
||||
Ay+ZTbcXL8Hbg7hE0C0PiPWutWA+kQBtR/I1mXb7Emg96xPGMYmTfT1rCAbr+sQ5dJNmC6JrDHg9
|
||||
tRHdfje2EmvpQbdKN8LsgU4rv4IVmPVcYFfrxmJVdqWG3vU0YuP35qfdjPYeBFlyoi5JHubmt3wI
|
||||
OufVYWtMIrZ+R41TVV/2qC7YXrJb0fAANvdYqZXUN0aD4piDf//5+/v7+99//v7+n//65+/v3354
|
||||
Vt2///n7d6m25b+rvqiez/r7/u/H8/HfPC/++1///P39S+bHu/r3P3//88/f39/fv79p6H/L/7cM
|
||||
bfWd//3Pn/hf//z9/f39uwzLo/v/lqGtvvO///kT//n7+99//vcfAwM=
|
||||
headers:
|
||||
CF-Cache-Status:
|
||||
- DYNAMIC
|
||||
CF-RAY:
|
||||
- 88142862e9f182e2-GIG
|
||||
Connection:
|
||||
- keep-alive
|
||||
Content-Encoding:
|
||||
- br
|
||||
Content-Type:
|
||||
- application/json
|
||||
Date:
|
||||
- Thu, 09 May 2024 19:39:49 GMT
|
||||
Server:
|
||||
- cloudflare
|
||||
Transfer-Encoding:
|
||||
- chunked
|
||||
access-control-allow-origin:
|
||||
- '*'
|
||||
alt-svc:
|
||||
- h3=":443"; ma=86400
|
||||
openai-model:
|
||||
- text-embedding-ada-002
|
||||
openai-organization:
|
||||
- crewai-iuxna1
|
||||
openai-processing-ms:
|
||||
- '22'
|
||||
openai-version:
|
||||
- '2020-10-01'
|
||||
strict-transport-security:
|
||||
- max-age=15724800; includeSubDomains
|
||||
- max-age=31536000; includeSubDomains
|
||||
x-ratelimit-limit-requests:
|
||||
- '10000'
|
||||
x-ratelimit-limit-tokens:
|
||||
- '10000000'
|
||||
x-ratelimit-remaining-requests:
|
||||
- '9999'
|
||||
x-ratelimit-remaining-tokens:
|
||||
- '9999946'
|
||||
x-ratelimit-reset-requests:
|
||||
- 6ms
|
||||
x-ratelimit-reset-tokens:
|
||||
- 0s
|
||||
x-request-id:
|
||||
- req_49a762eade78fd2313d74a32fa22ead7
|
||||
status:
|
||||
code: 200
|
||||
message: OK
|
||||
- request:
|
||||
body: '{"input": ["test value"], "model": "text-embedding-ada-002", "encoding_format":
|
||||
"base64"}'
|
||||
headers:
|
||||
accept:
|
||||
- application/json
|
||||
accept-encoding:
|
||||
- gzip, deflate
|
||||
connection:
|
||||
- keep-alive
|
||||
content-length:
|
||||
- '89'
|
||||
content-type:
|
||||
- application/json
|
||||
cookie:
|
||||
- __cf_bm=4yTFGytKZgiKE08enzMel3PhLQLu9mwQY7gk_lH43Bc-1720089820-1.0.1.1-4V06JbYY1zuYkbV7SSudBUPyqHqm6yFze0ufE2rHtJTtOmU.XLh_k1M9h.a.oVJGC44GhO25bv5s5224Ic.p6w;
|
||||
_cfuvid=04ERWHryLOXBKqyH.MpOILpw8OtW9Tchzbutsc9OJnE-1720089820726-0.0.1.1-604800000
|
||||
host:
|
||||
- api.openai.com
|
||||
user-agent:
|
||||
- OpenAI/Python 1.35.10
|
||||
x-stainless-arch:
|
||||
- arm64
|
||||
x-stainless-async:
|
||||
- 'false'
|
||||
x-stainless-lang:
|
||||
- python
|
||||
x-stainless-os:
|
||||
- MacOS
|
||||
x-stainless-package-version:
|
||||
- 1.35.10
|
||||
x-stainless-runtime:
|
||||
- CPython
|
||||
x-stainless-runtime-version:
|
||||
- 3.11.7
|
||||
method: POST
|
||||
uri: https://api.openai.com/v1/embeddings
|
||||
response:
|
||||
body:
|
||||
string: !!binary |
|
||||
H4sIAAAAAAAAA1R6WbO6Orvn/fsp/rVv7bdkTth3zDKZIKgLu7q6QBFBERkSIKfOdz+Fa/fp7ptV
|
||||
JeAiJs/zm5L/+NefP3+1eV1cx7/+/vPXqxrGv/7Heu2Wjdlff//5n//68+fPn//4/v3/niyavLjd
|
||||
qnf5ffx7s3rfivmvv/9w/33l/z7095+/GpyZFHdumi+T7J9BWuUz2eglAVNAwACtq4mol9gfMC8P
|
||||
L4b7bksoTj4NIKfmJMFNq2gEdsM7n+/7pAKXnMuwr+zfbAmzSIGHUogxqi+nfj7co6MUmFtAiuLO
|
||||
9Yv9Ua7gGt41GjyUCCz5Q5LgDZdXelGWhY33exkC5zUF+JhPJmBjV8ZqPhshdVmD626vcRY47/YH
|
||||
Gj5OLzA9iBGpQ15fCZxKrx7zJL7CVORFbDRcDVizzARytmVQbV6u5tQgUsH80HAUx2ZSL71zucJR
|
||||
SUOknoUJsOLYGtDX4w2aBgmYwzV7pvDAA5ksD6ViLCj3sWKm24Xun5bbT9pLRyp5ygrWdzcuGM/l
|
||||
o4LcWbHQxEsmmw3RPML7zpGwP1xsxl5xOME4P22JqI11PWVFxKn80ocEhjuSz8F1RMprxBX5iOBV
|
||||
z5VHLTiflC3VhNMzX54npsHzPS2xd58u5sLZqQ8oeGd477dGQIrhScBZaD84SMKxHtswDqE+hh32
|
||||
5nzOKQ6IBbhxHtHSH6yEu7htAbaOL1Lsx3ZCGl8oIJlGhH0KX8H0FvoGWoapoq1Nb8HcwPsGyG0Q
|
||||
oe98zD6ZFVXaiz22/UwHs/JOHZiF9IQE81QlH5ubOPjDzhLh5UeZz/vEeML0uWfYFvWeDdbP4qqz
|
||||
X/jUMPK2ns/lo4Ty4ldUf1Y6EJ2Cc+DldlDJ5NuJKdZ8GEL/SheMwE6v523mGdCg8hubNX3mDA+5
|
||||
BrWCXOjOLQ5g8YCQgm7wI6ydL17NHpomwNt0SRELdamejMd1gtjjCoxechcM/K3ovuMnTA06sDQx
|
||||
ekL22N2xtzcE87PsnldgQnjCWJFxPi7tIYPNORmJ8jOn/WiP7QC2YnZAQj7fwfzuxxBUs+vi/IOK
|
||||
erpyXggaQBmB4s+5n9ukO8NPRd943wG+n1/oOUCuK10a8BVvzjYncVDVzC3a5EEDukcXh0qK2ED9
|
||||
SXASfp1PVXIMgPewGQG9124MxgqpiAcgZIvOv5+ASI8N1Z9bIxHV3TTA6u0K2OZzqf/04rLZKj0a
|
||||
qH6vJEbYs/ZhKqRH/MPbgDH3dLiqNy/ABBB32y+pJaXQaC2RbLj3mMyboF1gjGWfepkgmYOYcAbc
|
||||
g/Mehw1uwPydDzur79ioRcRG+2hK8HMvdLqbAoWNtE4WuFMBIlIDtJw1iifA+5MgsjGdyhww8Ipv
|
||||
vaCLCOx+tkIwwfKz3+OLcFpyUt1PHYQf94Tivr0FMx9WlmqSi0T1nZb087f+0uoyY68ZUcDeeqYp
|
||||
1846Y7uCb5PiITeg2ECDBLX67IfrIDtwnQ/ETfQD3uCQH8HxcJWo/yNqidCWRIAS6w7YdbZ2LQwD
|
||||
bEDweB3ors2G/kPAuYNZUf9QjZdqRiJbHkBS5DnGbRuyntM/CiyBjpEwb8t8lCVfgPmY7cjExVO9
|
||||
TLJxVM2Mcdjg5S1ggabEwNHONsb+pwgG9jAd2LQ2pa4bbesWJWEK90ZkUuPG5QGLveQJ7wfLR1sc
|
||||
Nskwpo9JPXPCG5u9GZtLkewUECXzEykU2kH/xcfTsTqi+XXOzWlMHws8nHmN7owO1+R1M13Iy9qF
|
||||
1M+P3VNBeEWQp/oOa22u1MNjfzvCDyQSNb13D5ar+IihvakMiuQHydlt6CRl2bkXWqzruYSJO8H1
|
||||
M9mSvuwXfOOe8A3VPZo6fWQTvwmewHF9njQ/x3tCvnwyGzGg7vntgeaeyxtoye0Te8dr0JMkepUQ
|
||||
ZK1F063imEucdw40WkdEwpZV+SKpYghv2fZAEQAKoHxra1+++uIXmIJ3/4TCBRypeynEgFnteQC3
|
||||
PckQe35eNaMOUiDffhSK7dTK+U1QLrBpupBirajzCSVWCg7UwFTr37o5acPlDOHHP1F8rF1zWi7L
|
||||
BuxVH5Ol4Uw2RMahg5f8BqiRlWMyJjAS4PV8jekP14J8bITP8ss316tkJ7OtUE0JpWjGX/5lyQ8Q
|
||||
gHK/pEi+sTGYbsanUiZBcNBWvtSgNy6bK5Q95wfvpiBjS/NMImAwJcA2TMf+NXW9BLnti1Jrir1k
|
||||
VrbZBnaDG631qOWzaqXttz5xUKiAUXt2JwiyzsLh42QzXsdOBM/3rMRa2YX5GI9To3zn/yiVUtCz
|
||||
+d3B1l0eSNT9pznvPjAC5BVd6O1ubXvmFNBRftxrTw73tqyX89gYoLqVJlGcTwtGHTvxFy+QNIdm
|
||||
8DsepTZviMxTaZL8/bbA4SxqiPXlUn/XC3LisiXb0d7n7CLLEE6xWGPjqMlB/zSHEj7iJMVWB971
|
||||
ZHoXBcKUCdiO0NNcxqsuAPxjn3DQH5aE8a5iwGN/9bDBb3f1Eg1XDu5Pyg7rwxT1y7fewqagaFGu
|
||||
rTlkP+4ALgpIEHhmr2QafK+CH7MjNIzfVc1Op88Rno7lEd8RtRnXgU8ID3kWoKUWCaBknlJYnl7e
|
||||
uh6f5DWS6AwKVnCEqx9LsGjtI4YlQ3ciPG2hnr3y0AKP03a46FhvtnFqaDDt3tq6vvua6XZpwFHt
|
||||
Z2p5hgaEgQsHKDc/aOUPh03O0SaQ4zcq9YKeyz+RmCBY0meGeHysAI0UEAO7Gih2uTICU6bWHGyv
|
||||
2Qv7z22U/PKXKl9bvC9GByx2VR3hV9/B+F31s3+6Od//R7ZaYebMCCdfNXs3oUawdOZynS4uFJdU
|
||||
xL7mHfsJye8KhjhsSFmNpfntJwhtb6GBlwb1yg/TVw9g96IL/WKykwP9y3VP9z/JIaFj10Ywz582
|
||||
jsU7M0cxN87KPlB4MoGmMEn0Csk/6xHuSLLy7REEYeCjit/u+iU4EA5ET98kAoxqc7gZjxIeyhtD
|
||||
4yZ8BCxivQugHSyILYprTp1+v4KL18tYn7u5nqcgkqA5vAS6l+PSZDLoJGhddYQdeu/BmCdZAW+J
|
||||
/8Du8MnYyPeqAw56pqC3RExTWGBjwOpWmVhbfz8RXCCAtR+xt0MJW6yjb4GRF3Xyxeuv3oLHH87G
|
||||
3mtf1dOps+H3+9SxKTVpRtQMjKZr0fimPcAEtscFrPyCzSw1AO85nADzF5KoXxkgnwpP7qBQOU/q
|
||||
uA1M6HgICsi/0Q9FbByC6UH8+Kv3qLnbifmUvkEBI3HaUacO+Hy+YWLA/XvwCOdvBzbbx0CST5oS
|
||||
kZtTKcnYfTICzvumwLtEegc3r7x0MA2qHVGjq5WzDdYEuO9Ugt13WrI+FhEHjveOR4/S1vOFz7US
|
||||
HpS7hYjwlPNRxyiGYKdpONEnHsy7DxfBlS8JGAJWD9/6WfFwrb86Xz6fSIJKHw7kfrl5wegTWYJg
|
||||
sgKKVcFKph+JIOV5lTH6rP5juj1gBc2fEmOsAROw7e1jwXzWQqxX9xGMqXCQ1LWf6Q68poCMR3mA
|
||||
RFJudD9LXc3AokWg0ZGDBD0m9cRvzEb5fCwOCYb0ACQrz/CrN6h2gJ45+Y7Qff0Hgd5bBssl1Cp1
|
||||
7XfEQx6Zy82iLSBPoJCF3777edauxy9fU6M65P0ciUkISUMh9dNZ7+ew318hGAqH7vKQY8sXLxvj
|
||||
86FOuxX75VtvBejMr37MhdPOUOBNat6oKQ5cMl1OzQTzMd3h9KKf+y/ewOLRLRTBFNT1Hmcl8NKQ
|
||||
Ue8+ycFI63wBQ/64Iu3tl/XC9SACJ782ELhpmI1PLbyCr1766tNO/LlaQAozl5rr+vX6YLowS8sP
|
||||
2oTKM1/9XgQd7WjT89o/9HTEKUy5Z0zd3blIiLS9uKAEJsZuz+u5mJ+XCqjb25k6DA754Pq9A/bm
|
||||
2FArOpN+6t13A+VdfMLa4e72E9a2A9SWc0EU2zzVv/7W3sctgs4U9rNqRR2sHUyJzIpXMDF9skCc
|
||||
/2xJ43+igN4YMOBUIwMHWQkSNjwE9MUfIlvcLp9VH3EApIKBw/34qNuyWVxVNNQ7ds6qUwu5e7Ug
|
||||
asIMcU4V5lzZKD6MNK+k6MDZCb3IM1TFl6vhSxWN/fp+B+5P0g6fbFVPljMnuCAaF55sVn017AsX
|
||||
Qr88idSEpMvp4VFcwchpW5qXR7UerLsfg1VPU/MxTP2qhxD01E6mule2Jou9vPnlt3V92FhFWwSN
|
||||
c8xjL6m6nOj8u4GHG9oj/iqzZDz5s6uSy82muOK6fqj4LFWi+KhT734p82mJOw7mr1CiSNRlNnsN
|
||||
FuD+UwbY/DiNOdSBz4FT66dkuWkPNm7fuzOs0yyi+Kxy/cwJcwvJoTthZ+2f5nULfOV4b3ki10IJ
|
||||
xlxWG7DqO2o9wAYsSkJb8NUH/Z4Pg4XrWQSenJciwsbBXO7HhwDgWeWoRTCfsEM1persX316K953
|
||||
c06pPoHgM5XYsw87MOmZ1cL3c97QdbzBdIJZCz2EH9TXmlPAlAo50DJ0FUlrPc7Xx0Kgr5xcursN
|
||||
n3z0HI6DUXzWVz/pm4O3lD4YNKfFzuoHlyIQl189yGF5D5rUmlIosfZAnZ/jNhnf/SuEcIMhxbje
|
||||
BkOXcTEk+M7QFkZz0FcjdJQ1b6A6iyzApPBEvvyDrfoRm/M+8RvoTpueJD8SWfXvFkGHZioS8uwd
|
||||
sO3t4UBpz/dUk8O5ZjvkneHq76jPJw6b3jc0wZUPsVbuA/bxykOnhlmfI1mNd7kgbQ++epeOMZHV
|
||||
+J188x34EkMOyScoBgsXF1cIK+ai27VLTHZCFwQFnrSEo7xbj/FwbiUjTOpvXsBmahABHqoDT6QO
|
||||
E3O64OkJNzB4IdUzEJvBnsVwN9ghafbNK2B6cA5BYsA3Nj0/yKfDZClQ9I4DRWfi5P3ZvLbAlVi+
|
||||
6vmfYA5q9QzrQTui3kv7fgH1gwP5NvRonNRpvljKiMDg0YwGjvQT0O5VtcAWzgwp/d2qhfvxwcGH
|
||||
djvj3UuRgpXPBkjZ0lH9vpVzcp0uPuiDvUctlCj911/Bzi1DHARLGrCM8BlY+ejbD2Cculr55Xft
|
||||
yISEqTuJAEF9pNQBZx2wt1v54OA9PbyPmGq+Z/gM4R3HO7Q0BJrjVw8H9qzSVe+teJV2wKxUC3tS
|
||||
2uRDKAQCMMJDjfvP0ck/X77VL02DjXcA+3G+yD4Aw9XBu+PSJmN6VS0w6mcDewRAtrwv5wHAcusg
|
||||
Tiolk6KbMoGX2ug40HQ/mECpL//4zYMXgOXJIw08suubelxc1PTeRRm86KKHA/P5yYmyl69g1T/U
|
||||
k+jWZHHnljANtx11nBD3w/3gTXBvOTb2L66Sr/24wNp5IhzmG+ubN0F4obstYrudmNDT+ERw1duk
|
||||
GgK7n9DwCOFNer6xBVWpfgmJgmBq+woifkbZ1z9DKTL2RHyXfTK/6KWF3zxon+6Xvi8ezVHNquD6
|
||||
5cd6EOx8Uah6uVLNbpeaxiISIJHqDd5vQj0Q32/eBS8RcXRHD3JPtPYTgTV/WPWfZi5C6whg9XOI
|
||||
W/Gmq5txgfJAcuo/eqP+1eMfTnGxrW40xu3yk/Cb1zwWZuUtGh5IDadCp97KJ0ITOw2AurQn9zWf
|
||||
G3bQn8A0cBzdXZ1zsEjbiw9Lk+2ojXtQN9DfazB24hSBqeXMudiAUP7VW7gyg+nTvzXovJs9ktf1
|
||||
WC5L3yorPyJRrq75fJqPkYLN+wGB01AFcz0PvoIk7Uqv3vsCqMxly5f/kFLaOCGW8kIAdi+JhshS
|
||||
koFrE/TVz9hyqiEfAWkREHXvhl3tPiedOEkKeF4B/s0vnnoWtmD7ajC1N7mez6lwUGDUbxhGeeCA
|
||||
JUwFqCSMr+je7Mdguh/0CWxgadGfA5/kLdcmIRwSMtBv/rfmKZwIKO0JOxUPttZjqqaCyZMWaK+A
|
||||
GaHkKv4R/J98Ij8rFWRBnxLYezvGw2teAUAil0bAU/sV7y2A5h6TZb5u8+nU7aGSXyYTn794buUZ
|
||||
gu2bz+hu94x6+bm7Pr/9+cXjYPk59S54Xd4F2a7+T2SSuMAVT4nknt/J6DU7Dq5+ilqakQa/+bKP
|
||||
C4CEffNa9d+7BZ9OEpBkVC9zOog3DRZUoFg36iQgH7eHwPGgSFQwDD2BnbwBP5QwuofNnrFp13Ig
|
||||
ltQT6Z/ZK58DwdAgr+cLOpxbkdG7kR/BeXO//PO+K36i3/x6kS7HZKl32QQd1+XxhaVqPn6ypYBK
|
||||
MB8ptqlqzs/d8Qn7AHto004/ydJvvRiKzavGHi/NYFzvAzkefWo9no3JkHWCwCsz85s/5PP2LobQ
|
||||
MsUtUvhply80PWWwFu8eghcQJeO9kc7wW39rHhpM+auEv/hzWP3XWB777je/drdKYzJ67wmsVeGM
|
||||
da90g3U/g8ClW244qEsrWDKZLcDcVyppwKjUU0AYATuLt7AjyI+8b4VTCmdWnchWAzX7zddXfU2q
|
||||
tNYT/pvXXmT7QPf6QavJy2yOUM6UC9aGR5UPDWpKWOlkQttyPoNZf2QbyJ9FiQirPp0NykI4vQwL
|
||||
f/3ClO6cEjIWaTgSBtVk9qxNcN2PoT6LFjAsa1yhavoWW2IjMPK+FANMuSamOwGRnvmHlwHtdLyh
|
||||
p/XSErF4NGd4MBGkuqe29fSUBAe4yzT+1odQemMMD5wt4m9exU9dLYG9SRvs/fL5pj6q5QfvyQY8
|
||||
DVNcup8KXjItpehatf38UewC1tsuJSRCcdB+84RR/czULu5NP/P2AaqLsI8J16TbnFmfqf3mRxS7
|
||||
wxIQVY6RUqh2Q/WQ49kE3wcJ7LZj9M33GJv4qwM3hUKwliddP1G+i8Ca39JQF4Ke85bWh7cpT7E3
|
||||
bSYwUJK5UMx9gr/6fzTvWgHT6E1J3ZtLMB0iWQF80iR4f+2SYL4+FAK/+0/4ehSSV1E4MVjHj90d
|
||||
24AxUTYKsCNlxEEtQkZXfwqKR7sQGG/AyvcHHz7S9rXmnQGbX/TQwji1MqwZs51z3gEgaPZ+gs0L
|
||||
MHJ6Nq+d4uSLjJTKNQOGVHv66jm6P42Fycw8ytTyYW6ov96n6OUrIN6NgPr3lvQjObIKfuxioiu/
|
||||
JwzdltXPpztqWz8k6Ir9YYDikokUWZqZtDfjU379Czb4ahfw1NtsYPlzCbC1FzJzni+zDy/0WiBo
|
||||
jRLocvfqQGm7s6lpMq/n1nwHVsFPRfiTawfzuK8z4FK/xYGRn+tJSeUOdtpRRSpNheDZV2EGVz+G
|
||||
QHlU+yk8TxzAsM6pW+ZhMB9sUEJZk1383R9YSH2BIPcuT2yUR9xPjd6G8lovdN/oXM9YfGzhcc+x
|
||||
3987sV0TwxbcCrrqZzY75sWHo5KFVNfULVu25NaBczBdKc6zUzC/xu0A0w22MKZ3GSzslhiQnoMj
|
||||
RUL3MIfPsvVhqBsfIvkJS4ZTpQoQt+CHVOjzk8wnaUiVQE2fSHzdfno6HswCnjvxRMDqL8a37TuQ
|
||||
O0sWDn4+yBzr+OBCLqpE9EiOvsn58hSDexPM2D72z5ok3ikFsVqW37yoH7RgSEEjyzN25In0U1uU
|
||||
BIrH+UF9Tz3Xcxb6R3i0kI4xdR8mcRtkgG8+rwZ21bcr/8DmfBjRnBw7c0ABF6uPIfpgfUgic4kV
|
||||
1QIxsl3qHkb3myeUX/2A5nur9ZwycSWEMFeILAK7nlwz3sBnsNujNV+oOy4sI+g3o4Wtnw6a43CX
|
||||
LPg+eAoOlWNQz2rHFnhJ+uDXfzZnlBuw7sKYniq4C1gX2tw//mGQQDDGo9SAr1/evcs+X9rKnf7B
|
||||
/7NQMXK67xdYb9v0u3+XEEnSDdA0bYjjF5r633yF27ghmYaNwViNHo6aSwLEpjV2bLA2pwhk3bEn
|
||||
FIdNPkuTQmDEQ5t6xpzk7LjzHJAKOo+9ci8DSmS3VKepkLEeKMd+9fMa+OYtYAiSmrH42gLb3l2w
|
||||
Kf60fZu/yo0qWUjGyPqBZgu2xwmO58Ulwqv3AG+P5QDA5ASIa4q3uSjJu4NrvoHx3otrZhy1GJaP
|
||||
9IaTIUvYkninDKSiKKLL+fJTL0k0VuAytlfCnSLaz7TOJ8ArXI4tPuzyyRm7Cm6CqMPou39uHQ0H
|
||||
kDzZUbcNs4RVI2cBzqlfKx5GbPqOl+SHHfVQ6yXzyp+AAWeibsWn9VhFYgj++p4K+M9//fnzv74n
|
||||
DJr2VrzWgwFjMY///u+jAv/Obtm/OU74PYZAhqws/vr7nxMIf336tvmM/3tsn8V7+OvvP8LvUYO/
|
||||
xnbMXv/P5X+tL/rPf/0XAAAA//8DAPM57x7eIAAA
|
||||
headers:
  CF-Cache-Status:
  - DYNAMIC
  CF-RAY:
  - 89de84040d6cdab1-MIA
  Connection:
  - keep-alive
  Content-Encoding:
  - gzip
  Content-Type:
  - application/json
  Date:
  - Thu, 04 Jul 2024 10:43:40 GMT
  Server:
  - cloudflare
  Transfer-Encoding:
  - chunked
  access-control-allow-origin:
  - '*'
  alt-svc:
  - h3=":443"; ma=86400
  openai-model:
  - text-embedding-ada-002
  openai-organization:
  - crewai-iuxna1
  openai-processing-ms:
  - '15'
  openai-version:
  - '2020-10-01'
  strict-transport-security:
  - max-age=31536000; includeSubDomains
  x-ratelimit-limit-requests:
  - '10000'
  x-ratelimit-limit-tokens:
@@ -398,7 +393,7 @@ interactions:
  x-ratelimit-reset-tokens:
  - 0s
  x-request-id:
  - req_834c84237ace9a79492cb9e8d1f68737
  - req_c684605ab95c8ee822c1b082fc95d416
status:
  code: 200
  message: OK

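For orientation: the block above is the tail of a recorded HTTP cassette (note the `interactions:` hunk header and the `headers:`/`status:` sections). The long base64 run is the compressed response body of an OpenAI embeddings call that is replayed during the test run instead of hitting the live API. A minimal sketch of how such a cassette is typically consumed, assuming a pytest VCR plugin (for example pytest-recording) provides the `vcr` marker used later in this diff; the test name and cassette location below are hypothetical:

```python
# Illustrative only: replaying a recorded cassette in a test.
import pytest


@pytest.mark.vcr(filter_headers=["authorization"])  # keep the real API key out of the recording
def test_uses_recorded_embeddings():
    # Any HTTP request issued here is answered from the matching cassette on
    # disk (conventionally tests/cassettes/<test_name>.yaml), so the test is
    # deterministic and runs offline.
    ...
```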
@@ -9,6 +9,7 @@ from pydantic_core import ValidationError

from crewai import Agent, Crew, Process, Task
from crewai.tasks.task_output import TaskOutput
from crewai.utilities.converter import Converter


def test_task_tool_reflect_agent_tools():
@@ -310,7 +311,7 @@ def test_output_json_to_another_task():

    crew = Crew(agents=[scorer], tasks=[task1, task2])
    result = crew.kickoff()
    assert '{\n "score": 3\n}' == result
    assert '{\n "score": 5\n}' == result


@pytest.mark.vcr(filter_headers=["authorization"])
@@ -393,6 +394,38 @@ def test_save_task_pydantic_output():
        save_file.assert_called_once_with('{"score":4}')


@pytest.mark.vcr(filter_headers=["authorization"])
def test_custom_converter_cls():
    class ScoreOutput(BaseModel):
        score: int

    class ScoreConverter(Converter):
        pass

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
    )

    task = Task(
        description="Give me an integer score between 1-5 for the following title: 'The impact of AI in the future of work'",
        expected_output="The score of the title.",
        output_pydantic=ScoreOutput,
        converter_cls=ScoreConverter,
        agent=scorer,
    )

    crew = Crew(agents=[scorer], tasks=[task])

    with patch.object(
        ScoreConverter, "to_pydantic", return_value=ScoreOutput(score=5)
    ) as mock_to_pydantic:
        crew.kickoff()
        mock_to_pydantic.assert_called_once()


@pytest.mark.vcr(filter_headers=["authorization"])
def test_increment_delegations_for_hierarchical_process():
    from langchain_openai import ChatOpenAI
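The `test_custom_converter_cls` test added above passes a `converter_cls` to `Task` and asserts, by patching `to_pydantic`, that the task routes output parsing through the supplied class. As a purely illustrative sketch of what overriding such a converter could look like (the `to_pydantic` name comes from the patch in the test; the clamping logic is invented for illustration, not crewAI behaviour):

```python
# Hypothetical subclass; only the method name is taken from the test above.
from crewai.utilities.converter import Converter


class ClampedScoreConverter(Converter):
    def to_pydantic(self, *args, **kwargs):
        # Let the base converter parse the LLM output, then post-process it.
        result = super().to_pydantic(*args, **kwargs)
        # Keep the score inside the 1-5 range the task description asks for.
        result.score = max(1, min(5, result.score))
        return result
```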
@@ -413,31 +446,29 @@ def test_increment_delegations_for_hierarchical_process():
        agents=[scorer],
        tasks=[task],
        process=Process.hierarchical,
        manager_llm=ChatOpenAI(model="gpt-4-0125-preview"),
        manager_llm=ChatOpenAI(model="gpt-4o"),
    )

    with patch.object(Task, "increment_delegations") as increment_delegations:
        increment_delegations.return_value = None
        crew.kickoff()
        increment_delegations.assert_called_once
        increment_delegations.assert_called_once()


@pytest.mark.vcr(filter_headers=["authorization"])
def test_increment_delegations_for_sequential_process():
    pass

    manager = Agent(
        role="Manager",
        goal="Coordinate scoring processes",
        backstory="You're great at delegating work about scoring.",
        allow_delegation=False,
        allow_delegation=True,
    )

    scorer = Agent(
        role="Scorer",
        goal="Score the title",
        backstory="You're an expert scorer, specialized in scoring titles.",
        allow_delegation=False,
        allow_delegation=True,
    )

    task = Task(
@@ -455,7 +486,7 @@ def test_increment_delegations_for_sequential_process():
    with patch.object(Task, "increment_delegations") as increment_delegations:
        increment_delegations.return_value = None
        crew.kickoff()
        increment_delegations.assert_called_once
        increment_delegations.assert_called_once()


@pytest.mark.vcr(filter_headers=["authorization"])
@@ -490,7 +521,7 @@ def test_increment_tool_errors():
    with patch.object(Task, "increment_tools_errors") as increment_tools_errors:
        increment_tools_errors.return_value = None
        crew.kickoff()
        increment_tools_errors.assert_called_once
        assert len(increment_tools_errors.mock_calls) == 3


def test_task_definition_based_on_dict():
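One detail worth noting in the hunks above: a bare `increment_delegations.assert_called_once` (without parentheses) is only an attribute access on the mock and asserts nothing, which is why the diff adds the parentheses or switches to counting `mock_calls`. A small self-contained illustration of the pitfall, not taken from the repository:

```python
from unittest.mock import MagicMock

mock = MagicMock()

# Bug pattern: this evaluates to a bound method and silently "passes"
# even though the mock was never called.
mock.assert_called_once

# Correct pattern: actually invoking the assertion raises AssertionError,
# because the mock has zero calls.
try:
    mock.assert_called_once()
except AssertionError:
    print("assertion correctly failed: the mock was never called")
```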
@@ -17,6 +17,10 @@ class TestPickleHandler(unittest.TestCase):
        os.remove(self.file_path)

    def test_initialize_file(self):
        assert os.path.exists(self.file_path) is False

        self.handler.initialize_file()

        assert os.path.exists(self.file_path) is True
        assert os.path.getsize(self.file_path) >= 0
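The new `test_initialize_file` above only checks that `initialize_file()` creates a file at `self.file_path`. A minimal sketch of a handler that would satisfy those assertions, assuming the attribute names used in the test; this is illustrative and not crewAI's actual `PickleHandler` implementation:

```python
# Hypothetical stand-in consistent with the assertions above, not the real class.
import os
import pickle


class PickleHandler:
    def __init__(self, file_path: str) -> None:
        self.file_path = file_path

    def initialize_file(self) -> None:
        # Create the file with an empty payload only if it does not exist yet,
        # so it is present on disk after this call.
        if not os.path.exists(self.file_path):
            with open(self.file_path, "wb") as f:
                pickle.dump({}, f)
```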