Compare commits

...

9 Commits

Author SHA1 Message Date
Lucas Gomide
910f051eb2 Merge branch 'main' into docs/oss-upgrade-migration-guide 2026-05-07 18:13:50 -03:00
iris-clawd
0844ed3c4e docs: add pt-BR, ar, ko translations of upgrade/migration guide
Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
2026-05-07 19:42:09 +00:00
iris-clawd
874c34f1da docs: consolidate upgrade & migration guide into single page
Merge the broader root-level upgrade-crewai.mdx into the canonical
en/guides/migration/upgrading-crewai.mdx so there is one comprehensive
upgrade & migration page covering: project venv vs global CLI, why
crewai install alone won't bump versions, breaking changes, and the
Crew-to-Flow migration. Removes the orphaned root-level file (which
was not referenced in docs.json nav).

Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
2026-05-07 19:14:36 +00:00
iris-clawd
853b15fb3d docs: add upgrading-crewai guide and installation note
Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
2026-05-07 18:37:26 +00:00
Greyson LaLonde
c67f6f63dc fix(ci): make nightly publish idempotent and serialized
Some checks are pending
CodeQL Advanced / Analyze (actions) (push) Waiting to run
CodeQL Advanced / Analyze (python) (push) Waiting to run
Vulnerability Scan / pip-audit (push) Waiting to run
2026-05-08 02:20:31 +08:00
Greyson LaLonde
964066e86b fix(ci): stamp and pin all workspace packages in nightly publish
Some checks failed
Build uv cache / build-cache (3.10) (push) Waiting to run
Build uv cache / build-cache (3.11) (push) Waiting to run
Build uv cache / build-cache (3.12) (push) Waiting to run
Build uv cache / build-cache (3.13) (push) Waiting to run
CodeQL Advanced / Analyze (actions) (push) Has been cancelled
CodeQL Advanced / Analyze (python) (push) Has been cancelled
Vulnerability Scan / pip-audit (push) Has been cancelled
2026-05-08 02:07:01 +08:00
Cole Goeppinger
74a1ff8db5 feat: update llm listings
Add the latest Anthropic and OpenAI LLMs to the CLI
2026-05-08 01:19:47 +08:00
iris-clawd
d73924d23a docs: add OSS upgrade & crew-to-flow migration guide 2026-05-07 16:56:41 +00:00
Greyson LaLonde
d6f7e7d5f8 chore(deps): use 3-day exclude-newer window
* chore(deps): use 3-day exclude-newer window

Aligns the root workspace with the per-package pyprojects, which
already use `exclude-newer = "3 days"`. The fixed 2026-04-27 cutoff
blocks legitimate dependency bumps (e.g. daytona ~=0.171 in #5740)
without adding meaningful protection — the relative window still
includes the security patches that motivated the original pin.

* fix(deps): bump gitpython and python-multipart for new advisories

- gitpython >=3.1.49 for GHSA-v87r-6q3f-2j67 (newline injection in
  config_writer().set_value() enables RCE via core.hooksPath).
- python-multipart >=0.0.27 for GHSA-pp6c-gr5w-3c5g (DoS via
  unbounded multipart part headers).

Both surfaced via pip-audit on this branch.
2026-05-08 00:11:05 +08:00
11 changed files with 1539 additions and 52 deletions

View File

@@ -5,6 +5,10 @@ on:
- cron: '0 6 * * *' # daily at 6am UTC
workflow_dispatch:
concurrency:
group: nightly-publish
cancel-in-progress: false
jobs:
check:
name: Check for new commits
@@ -18,10 +22,11 @@ jobs:
with:
fetch-depth: 0
- name: Check for commits in last 24h
- name: Check for recent commits
id: check
run: |
RECENT=$(git log --since="24 hours ago" --oneline | head -1)
# 25h window absorbs cron-vs-commit timing skew at the boundary.
RECENT=$(git log --since="25 hours ago" --oneline | head -1)
if [ -n "$RECENT" ]; then
echo "has_changes=true" >> "$GITHUB_OUTPUT"
else
@@ -38,34 +43,42 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Install uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v6
with:
version: "0.11.3"
python-version: "3.12"
enable-cache: false
- name: Stamp nightly versions
run: |
DATE=$(date +%Y%m%d)
# All workspace packages share the same base version and are released together.
BASE=$(python -c "
import re
print(re.search(r'__version__\s*=\s*\"(.*?)\"', open('lib/crewai/src/crewai/__init__.py').read()).group(1))
")
NIGHTLY="${BASE}.dev${DATE}"
echo "Nightly version: ${NIGHTLY}"
for init_file in \
lib/crewai/src/crewai/__init__.py \
lib/crewai-core/src/crewai_core/__init__.py \
lib/crewai-tools/src/crewai_tools/__init__.py \
lib/crewai-files/src/crewai_files/__init__.py; do
CURRENT=$(python -c "
import re
text = open('$init_file').read()
print(re.search(r'__version__\s*=\s*\"(.*?)\"\s*$', text, re.MULTILINE).group(1))
")
NIGHTLY="${CURRENT}.dev${DATE}"
lib/crewai-files/src/crewai_files/__init__.py \
lib/cli/src/crewai_cli/__init__.py; do
sed -i "s/__version__ = .*/__version__ = \"${NIGHTLY}\"/" "$init_file"
echo "$init_file: $CURRENT -> $NIGHTLY"
echo "Stamped $init_file -> $NIGHTLY"
done
# Update cross-package dependency pins to nightly versions
sed -i "s/\"crewai-tools==[^\"]*\"/\"crewai-tools==${NIGHTLY}\"/" lib/crewai/pyproject.toml
# Update all cross-package dependency pins to the nightly version.
sed -i "s/\"crewai==[^\"]*\"/\"crewai==${NIGHTLY}\"/" lib/crewai-tools/pyproject.toml
sed -i "s/\"crewai-core==[^\"]*\"/\"crewai-core==${NIGHTLY}\"/" lib/crewai/pyproject.toml
sed -i "s/\"crewai-cli==[^\"]*\"/\"crewai-cli==${NIGHTLY}\"/" lib/crewai/pyproject.toml
sed -i "s/\"crewai-tools==[^\"]*\"/\"crewai-tools==${NIGHTLY}\"/" lib/crewai/pyproject.toml
sed -i "s/\"crewai-files==[^\"]*\"/\"crewai-files==${NIGHTLY}\"/" lib/crewai/pyproject.toml
sed -i "s/\"crewai-core==[^\"]*\"/\"crewai-core==${NIGHTLY}\"/" lib/cli/pyproject.toml
echo "Updated cross-package dependency pins to ${NIGHTLY}"
- name: Build packages
@@ -85,13 +98,10 @@ jobs:
runs-on: ubuntu-latest
environment:
name: pypi
url: https://pypi.org/p/crewai
permissions:
id-token: write
contents: read
steps:
- uses: actions/checkout@v4
- name: Install uv
uses: astral-sh/setup-uv@v6
with:
@@ -116,7 +126,8 @@ jobs:
continue
fi
echo "Publishing $package"
if ! uv publish "$package"; then
# --check-url skips files already on PyPI so manual re-runs on the same day are idempotent.
if ! uv publish --check-url https://pypi.org/simple/ "$package"; then
echo "Failed to publish $package"
failed=1
fi

View File

@@ -0,0 +1,359 @@
---
title: "ترقية وترحيل CrewAI"
description: "كيفية ترقية CrewAI والتعامل مع التغييرات الجذرية وترحيل Crews إلى Flows."
icon: "arrow-up-circle"
---
## نظرة عامة
يتطور CrewAI بسرعة. الإصدارات الجديدة تقوم بانتظام بضبط مسارات الاستيراد، وتغيير الإعدادات الافتراضية لـ `Agent` و`Crew` و`Task`، وإدخال أساسيات تنسيق جديدة مثل `Flow` ونقاط الحفظ (checkpointing). يجمع هذا الدليل الخطوات العملية اللازمة من أجل:
- ترقية أداة سطر الأوامر العامة `crewai` والاعتمادية المثبّتة في مشروعك
- التكيّف مع التغييرات الجذرية في الاستيرادات والمعاملات
- ترحيل `Crew` مستقلة إلى `Flow` مكتوبة الأنواع
- تجنّب الفخاخ التي تظهر في أول تشغيل لمشروع مُرقَّى
إذا كنت تبدأ من الصفر، راجع [التثبيت](/ar/installation). إذا كنت قادمًا من إطار عمل آخر، راجع [الترحيل من LangGraph](/ar/guides/migration/migrating-from-langgraph).
---
## الشيئان اللذان قد ترغب في ترقيتهما
يوجد CrewAI في مكانين على جهازك، ويتم ترقيتهما بشكل مستقل:
| ماذا | كيف يُثبَّت | كيف تتم الترقية |
|---|---|---|
| **أداة سطر الأوامر العامة `crewai`** | `uv tool install crewai` | `uv tool install crewai --upgrade` |
| **بيئة venv للمشروع** (حيث يعمل الكود) | `crewai install` / `uv sync` | `uv add "crewai[...]>=X.Y.Z"` ثم `crewai install` |
يمكن لهما — وغالبًا ما يحدث — أن يخرجا عن التزامن. تشغيل `crewai --version` يُظهر إصدار سطر الأوامر. تشغيل `uv pip show crewai` داخل مشروعك يُظهر إصدار venv. إذا اختلفا، فهذا طبيعي؛ ما يهم بالنسبة للكود قيد التشغيل هو إصدار venv.
## لماذا لا يقوم `crewai install` وحده بالترقية
`crewai install` هو غلاف رفيع حول `uv sync`. يُثبّت بالضبط ما يقوله ملف `uv.lock` الحالي — وهو **لا** يرفع أي قيود إصدار.
إذا كان `pyproject.toml` يقول `crewai>=1.11.1` وقد قام ملف القفل بحلّه إلى `1.11.1`، فإن تشغيل `crewai install` سيُبقيك على `1.11.1` للأبد، حتى وإن كان الإصدار `1.14.4` متاحًا.
للترقية فعلًا، عليك:
1. تحديث قيد الإصدار في `pyproject.toml`
2. إعادة حلّ ملف القفل
3. مزامنة venv
`uv add` يقوم بالثلاثة في خطوة واحدة.
## كيفية ترقية مشروعك
```bash
# يرفع القيد ويعيد القفل في أمر واحد
uv add "crewai[tools]>=1.14.4"
# يزامن venv (crewai install يستدعي uv sync تحت الغطاء)
crewai install
# تحقّق
uv pip show crewai
# → Version: 1.14.4
```
استبدل `[tools]` بأي إضافات يستخدمها مشروعك (مثلًا `[tools,anthropic]`). تحقّق من قائمة `dependencies` في `pyproject.toml` إن لم تكن متأكدًا.
<Note>
يحدّث `uv add` كلا من `pyproject.toml` **و**`uv.lock` بشكل ذرّي. إذا قمت بتحرير `pyproject.toml` يدويًا، فإنك لا تزال بحاجة إلى تشغيل `uv lock --upgrade-package crewai` لإعادة حلّ ملف القفل قبل أن يلتقط `crewai install` الإصدار الجديد.
</Note>
## ترقية أداة سطر الأوامر العامة
أداة سطر الأوامر العامة منفصلة عن مشروعك. قم بترقيتها عبر:
```bash
uv tool install crewai --upgrade
```
إذا حذّرك الـ shell بشأن `PATH` بعد الترقية، قم بتحديثه:
```bash
uv tool update-shell
```
هذا **لا** يمسّ بيئة venv الخاصة بمشروعك — لا تزال بحاجة إلى `uv add` + `crewai install` داخل المشروع.
## التحقق من تزامن الاثنين
```bash
# إصدار سطر الأوامر العام
crewai --version
# إصدار venv للمشروع
uv pip show crewai | grep Version
```
ليس من الضروري أن يتطابقا — لكن إصدار venv للمشروع هو ما يهم لسلوك التشغيل.
<Note>
يتطلب CrewAI `Python >=3.10, <3.14`. إذا كان `uv` مثبَّتًا مقابل مفسّر أقدم، فأعد إنشاء venv للمشروع باستخدام إصدار Python مدعوم قبل تشغيل `crewai install`.
</Note>
---
## التغييرات الجذرية وملاحظات الترحيل
تتطلب معظم الترقيات تعديلات صغيرة فقط. المناطق أدناه هي تلك التي تنكسر بصمت أو بتتبعات مكدّس مربكة.
### مسارات الاستيراد: tools و`BaseTool`
الموقع الرسمي لاستيراد الـ tools هو `crewai.tools`. لا تزال المسارات القديمة تظهر في الدروس لكن يجب تحديثها.
```python
# قبل
from crewai_tools import BaseTool
from crewai.agents.tools import tool
# بعد
from crewai.tools import BaseTool, tool
```
كلٌ من المُزخرف `@tool` والفئة الفرعية `BaseTool` يقعان في `crewai.tools`. `AgentFinish` والرموز الأخرى الداخلية للوكيل لم تعد جزءًا من السطح العام — إذا كنت تستوردها، فانتقل إلى event listeners أو callbacks الـ `Task` بدلًا منها.
### تغييرات معاملات `Agent`
```python
from crewai import Agent
agent = Agent(
role="Researcher",
goal="Find authoritative sources on {topic}",
backstory="You are a careful, source-driven researcher.",
llm="gpt-4o-mini", # اسم نموذج كسلسلة نصية أو كائن LLM
verbose=True, # bool وليس مستوى عددي صحيح
max_iter=15, # تغيّر الافتراضي بين الإصدارات — حدّده بشكل صريح
allow_delegation=False,
)
```
- يقبل `llm` إما اسم نموذج كسلسلة نصية (يُحلَّ عبر المزوّد المهيّأ) أو كائن `LLM` للتحكم الدقيق.
- `verbose` هو `bool` بسيط. تمرير عدد صحيح لم يعد يبدّل مستويات السجل.
- تغيّرت افتراضات `max_iter` بين الإصدارات. إذا توقف وكيلك بصمت عن التكرار بعد أول استدعاء tool، فحدّد `max_iter` صراحةً.
### معاملات `Crew`
```python
from crewai import Crew, Process
crew = Crew(
agents=[...],
tasks=[...],
process=Process.sequential, # أو Process.hierarchical
memory=True,
cache=True,
embedder={"provider": "openai", "config": {"model": "text-embedding-3-small"}},
)
```
- يتطلب `process=Process.hierarchical` إما `manager_llm=` أو `manager_agent=`. بدون أحدهما، يرفع kickoff خطأً عند التحقّق.
- `memory=True` مع مزوّد embedding غير افتراضي يحتاج إلى قاموس `embedder` — راجع [إعداد الذاكرة وembedder](#memory-embedder-config) أدناه.
### الإخراج المُهيكل لـ `Task`
استخدم `output_pydantic` أو `output_json` أو `output_file` لإلزام نتيجة المهمة بشكل مكتوب الأنواع:
```python
from pydantic import BaseModel
from crewai import Task
class Article(BaseModel):
title: str
body: str
write = Task(
description="Write an article about {topic}",
expected_output="A short article with a title and body",
agent=writer,
output_pydantic=Article, # الفئة، وليس مثيلًا منها
output_file="output/article.md",
)
```
`output_pydantic` يأخذ **الفئة** نفسها. تمرير `Article(title="", body="")` خطأ شائع ويفشل بخطأ تحقّق مربك.
### إعداد الذاكرة وembedder
إذا كان `memory=True` وأنت لا تستخدم embeddings الافتراضية الخاصة بـ OpenAI، فيجب أن تمرّر `embedder`:
```python
crew = Crew(
agents=[...],
tasks=[...],
memory=True,
embedder={
"provider": "ollama",
"config": {"model": "nomic-embed-text"},
},
)
```
ضع بيانات اعتماد المزوّد المعنيّة (`OPENAI_API_KEY`, `OLLAMA_HOST`, إلخ) في ملف `.env`. مسارات تخزين الذاكرة محلية بالنسبة للمشروع افتراضيًا — احذف مجلد ذاكرة المشروع إذا غيّرت embedders، لأن الأبعاد لا تختلط.
---
## ترحيل Crew إلى Flow
`Crew` هي الأساس الصحيح عندما يكون لديك فريق واحد من الوكلاء ينفّذ سير عمل واحدًا. عندما تحتاج إلى تفرّعات أو عدة crews أو حالة مستمرة عبر التشغيلات، انتقل إلى `Flow`.
### متى تستخدم Flows مقابل Crews مستقلة
| الحالة | استخدم |
| --- | --- |
| فريق واحد، سير عمل خطّي/هرمي واحد | `Crew` |
| تفرّعات شرطية، إعادات محاولة، توجيه بناءً على النتائج | `Flow` |
| عدة crews متخصصة مرتبطة معًا | `Flow` |
| حالة يجب أن تستمر بين الخطوات أو التشغيلات | `Flow` (مع checkpointing) |
| تريد حالة مكتوبة الأنواع وملائمة لـ IDE | `Flow[MyState]` مع نموذج Pydantic |
إذا احتجت لأي من: التفرّعات، تعدّد الـ crew، أو الحالة المستمرة — ابدأ بـ `Flow`. الكود المتكرر صغير ولن تضطر إلى إعادة الكتابة لاحقًا.
### الترحيل خطوة بخطوة
**قبل — crew مستقلة:**
```python
from crewai import Crew
crew = Crew(agents=[researcher, writer], tasks=[research_task, write_task])
result = crew.kickoff(inputs={"topic": "vector databases"})
print(result)
```
**بعد — crew داخل Flow مكتوب الأنواع:**
```python
from crewai.flow.flow import Flow, start, listen
from pydantic import BaseModel
class MyState(BaseModel):
input_data: str = ""
result: str = ""
class MyFlow(Flow[MyState]):
@start()
def run_crew(self):
result = MyCrew().crew().kickoff(inputs={"topic": self.state.input_data})
self.state.result = str(result)
return self.state.result
flow = MyFlow()
flow.kickoff(inputs={"input_data": "vector databases"})
```
ما الذي تغيّر:
1. تُبنى الـ crew داخل دالة، وليس عند تحميل الموديول.
2. تنساب المُدخلات عبر `self.state` بدلًا من تمريرها كـ kwargs.
3. تُحدَّد نقطة الدخول بـ `@start()`. الخطوات اللاحقة تستخدم `@listen(run_crew)` للربط.
### إعداد الحالة المُهيكلة
فضّل الحالة المكتوبة الأنواع (`Flow[MyState]`) على المتغير القاموسي غير المكتوب. تحصل على إكمال تلقائي، تحقّق عند الحدود، وحالة قابلة للسلسلة من أجل checkpointing:
```python
from pydantic import BaseModel, Field
class ResearchState(BaseModel):
topic: str = ""
sources: list[str] = Field(default_factory=list)
draft: str = ""
final: str = ""
```
الحالة غير المكتوبة (`Flow()` بدون نوع عام) لا تزال تعمل، لكنك تخسر الفحوص الساكنة ودقّة الـ checkpointing.
### نمط Flow متعدد الـ crews
ربط اثنين من الـ crews — بحث ثم كتابة — هو السبب الكلاسيكي لاعتماد Flows:
```python
from crewai.flow.flow import Flow, start, listen, router
from pydantic import BaseModel
class PipelineState(BaseModel):
topic: str = ""
research: str = ""
article: str = ""
class ContentPipeline(Flow[PipelineState]):
@start()
def research(self):
out = ResearchCrew().crew().kickoff(inputs={"topic": self.state.topic})
self.state.research = str(out)
return self.state.research
@router(research)
def gate(self):
return "write" if len(self.state.research) > 200 else "abort"
@listen("write")
def write(self):
out = WritingCrew().crew().kickoff(
inputs={"topic": self.state.topic, "notes": self.state.research}
)
self.state.article = str(out)
return self.state.article
@listen("abort")
def bail(self):
self.state.article = "Insufficient research."
return self.state.article
ContentPipeline().kickoff(inputs={"topic": "vector databases"})
```
`@start()` و`@listen()` و`@router()` هي المُزخرفات الثلاثة التي ستستخدمها 95% من الوقت. راجع [Flows](/ar/concepts/flows) للمرجع الكامل.
---
## الفخاخ الشائعة
1. **تشغيل `crewai install` وتوقّع ترقية.** يُزامن `crewai install` مع `uv.lock` الموجود. لرفع الإصدارات، شغّل `uv add "crewai[tools]>=X.Y.Z"` أولًا، ثم `crewai install`.
2. **القيد هو حدّ أدنى وليس تثبيتًا.** `crewai>=1.11.1` يعني "أي إصدار يساوي 1.11.1 أو أعلى". لا يعيد `uv` الحل إلا عند تشغيل `uv add` أو `uv lock --upgrade-package crewai` صراحةً.
3. **إسقاط الإضافات أثناء إعادة القفل.** إذا شغّلت `uv add "crewai>=1.14.4"` بدون إضافات، فقد يُسقط `uv` الـ `[tools]` من المجموعة المحلولة. ضمّن دائمًا الإضافات التي تحتاجها: `uv add "crewai[tools]>=1.14.4"`.
4. **نسيان commit `uv.lock`.** بعد رفع الإصدار بـ `uv add`، قم بـ commit للـ `uv.lock` المُحدَّث حتى يحصل زملاؤك على نفس الإصدارات.
5. **`pip install` بدلًا من `uv tool install`.** مزج `crewai` المُثبَّت بـ pip و`uv` يؤدي إلى ثنائيين في `PATH` وانحراف إصدارات مربك. اختر واحدًا — المدعوم هو `uv`.
6. **تمرير مثيل Pydantic إلى `output_pydantic`.** يتوقع الفئة. `output_pydantic=Article` وليس `output_pydantic=Article(...)`.
7. **العملية الهرمية بدون مدير.** يتطلب `process=Process.hierarchical` `manager_llm=` أو `manager_agent=`.
8. **الذاكرة ممكّنة مع embedder خاطئ.** تبديل embedders دون تنظيف مجلد الذاكرة على القرص يسبب عدم تطابق في الأبعاد. احذف مخزن الذاكرة الخاص بالمشروع بعد تغيير المزوّدين.
9. **حالة قاموس عندما كنت تريد حالة مكتوبة.** `Flow()` بدون نوع عام يعطيك قاموسًا. للفحص النوعي والـ checkpointing النظيف، استخدم `Flow[MyState]` مع `BaseModel`.
10. **استيرادات tools قديمة.** `from crewai_tools import BaseTool` يعمل في بعض الإصدارات لكنه ليس المسار الرسمي. وحّد على `from crewai.tools import BaseTool, tool`.
11. **انحراف إصدار Python.** يتطلب CrewAI `>=3.10, <3.14`. سيقوم `uv` بسعادة ببناء venv مقابل 3.14+ إذا كان الافتراضي؛ ثبّت إصدار Python في `pyproject.toml`.
12. **`verbose=2` وأعلام عدد صحيح مماثلة.** `verbose` هو `bool`. استخدم event listeners للسجلات الأكثر تفصيلًا.
13. **استدعاء `crew.kickoff()` من داخل Flow بدون التغليف في `inputs={}`.** الـ Flows تمرر state وليس kwargs. لا تزال الـ crew تتوقع `inputs={...}`.
---
## Checkpointing
Checkpointing هو إضافة أحدث تُديم حالة الـ agent والـ crew والـ flow بين التشغيلات. يسمح لسير العمل طويل الأمد بالاستئناف بعد انهيار، أو إيقاف يدوي، أو نشر.
```python
crew = Crew(
agents=[...],
tasks=[...],
checkpoint=True,
)
```
نفس العَلَم مدعوم على `Flow` و`Agent`. تُكتب الحالة في المخزن المحلي للمشروع وتُعاد تشغيلها في `kickoff()` التالي بنفس المعرّف.
<Note>
Checkpointing في إصدار مبكر. قد تتغيّر واجهات APIs المتعلقة بدلالات الاستئناف، وخلفيات التخزين، والمعرّفات بين الإصدارات الثانوية — ثبّت إصدار `crewai` إذا كنت تعتمد عليه في الإنتاج.
</Note>
راجع [Checkpointing](/ar/concepts/checkpointing) للمرجع الكامل للميزة.
---
## الحصول على المساعدة
- **سجل التغييرات** — كل تغيير جذري مُسجَّل في [ملاحظات الإصدار](/ar/changelog).
- **GitHub Issues** — افتح واحدة في [github.com/crewAIInc/crewAI/issues](https://github.com/crewAIInc/crewAI/issues) مع إعادة إنتاج بسيطة ومخرجات `crewai --version`.
- **Discord** — مجتمع CrewAI على Discord هو أسرع طريق للحصول على مساعدة في تصحيح الأخطاء: [community.crewai.com](https://community.crewai.com).
- **أدلة الترحيل** — إذا كنت قادمًا من إطار آخر، ابدأ من [الترحيل من LangGraph](/ar/guides/migration/migrating-from-langgraph).

View File

@@ -144,7 +144,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -1585,7 +1586,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -2066,7 +2068,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -2547,7 +2550,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -3028,7 +3032,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -3509,7 +3514,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -3988,7 +3994,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -4467,7 +4474,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -4947,7 +4955,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -5428,7 +5437,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]
@@ -5907,7 +5917,8 @@
"group": "Migration",
"icon": "shuffle",
"pages": [
"en/guides/migration/migrating-from-langgraph"
"en/guides/migration/migrating-from-langgraph",
"en/guides/migration/upgrading-crewai"
]
}
]

View File

@@ -0,0 +1,359 @@
---
title: "Upgrading & Migrating CrewAI"
description: "How to upgrade CrewAI in your project, migrate around breaking changes, and move standalone Crews onto Flows."
icon: "arrow-up-circle"
---
## Overview
CrewAI moves quickly. New releases regularly tighten import paths, change defaults on `Agent`, `Crew`, and `Task`, and introduce new orchestration primitives like `Flow` and checkpointing. This guide collects the practical steps needed to:
- Upgrade the global `crewai` CLI and your project's pinned dependency
- Adapt to breaking changes in imports and parameters
- Migrate a standalone `Crew` to a typed `Flow`
- Avoid the gotchas that show up the first time you re-run an upgraded project
If you're starting fresh, see [Installation](/en/installation). If you're coming from another framework, see [Migrating from LangGraph](/en/guides/migration/migrating-from-langgraph).
---
## The Two Things You Might Want to Upgrade
CrewAI lives in two places on your machine, and they upgrade independently:
| What | How it's installed | How to upgrade |
|---|---|---|
| The **global `crewai` CLI** | `uv tool install crewai` | `uv tool install crewai --upgrade` |
| The **project venv** (what your code runs) | `crewai install` / `uv sync` | `uv add "crewai[...]>=X.Y.Z"` then `crewai install` |
These can — and often do — get out of sync. Running `crewai --version` tells you the CLI version. Running `uv pip show crewai` inside your project tells you the venv version. If they differ, that's normal; what matters for your running code is the venv version.
## Why `crewai install` Alone Doesn't Upgrade
`crewai install` is a thin wrapper around `uv sync`. It installs exactly what the current `uv.lock` file says — it does **not** bump any version constraints.
If your `pyproject.toml` says `crewai>=1.11.1` and the lock file resolved to `1.11.1`, running `crewai install` will keep you on `1.11.1` forever, even if `1.14.4` is available.
To actually upgrade, you need to:
1. Update the version constraint in `pyproject.toml`
2. Re-solve the lock file
3. Sync the venv
`uv add` does all three in one shot.
## How to Upgrade Your Project
```bash
# Bump the constraint and re-lock in one command
uv add "crewai[tools]>=1.14.4"
# Sync the venv (crewai install calls uv sync under the hood)
crewai install
# Verify
uv pip show crewai
# → Version: 1.14.4
```
Replace `[tools]` with whatever extras your project uses (e.g. `[tools,anthropic]`). Check your `pyproject.toml` `dependencies` list if you're unsure.
<Note>
`uv add` updates both `pyproject.toml` **and** `uv.lock` atomically. If you edit `pyproject.toml` manually, you still need to run `uv lock --upgrade-package crewai` to re-solve the lock file before `crewai install` will pick up the new version.
</Note>
## Upgrading the Global CLI
The global CLI is separate from your project. Upgrade it with:
```bash
uv tool install crewai --upgrade
```
If your shell warns about `PATH` after the upgrade, refresh it:
```bash
uv tool update-shell
```
This does **not** touch your project's venv — you still need `uv add` + `crewai install` inside the project.
## Verify Both Are in Sync
```bash
# Global CLI version
crewai --version
# Project venv version
uv pip show crewai | grep Version
```
They don't need to match — but your project venv version is what matters for runtime behavior.
<Note>
CrewAI requires `Python >=3.10, <3.14`. If `uv` was installed against an older interpreter, recreate the project venv with a supported Python before running `crewai install`.
</Note>
---
## Breaking Changes & Migration Notes
Most upgrades only require small adjustments. The areas below are the ones that break silently or with confusing tracebacks.
### Import paths: tools and `BaseTool`
The canonical import location for tools is `crewai.tools`. Older paths still surface in tutorials but should be updated.
```python
# Before
from crewai_tools import BaseTool
from crewai.agents.tools import tool
# After
from crewai.tools import BaseTool, tool
```
The `@tool` decorator and `BaseTool` subclass both live in `crewai.tools`. `AgentFinish` and other internal-agent symbols are no longer part of the public surface — if you were importing them, switch to event listeners or `Task` callbacks instead.
### `Agent` parameter changes
```python
from crewai import Agent
agent = Agent(
role="Researcher",
goal="Find authoritative sources on {topic}",
backstory="You are a careful, source-driven researcher.",
llm="gpt-4o-mini", # string model name OR an LLM object
verbose=True, # bool, not an int level
max_iter=15, # default has changed across versions — set explicitly
allow_delegation=False,
)
```
- `llm` accepts either a string model name (resolved via the configured provider) or an `LLM` object for fine-grained control.
- `verbose` is a plain `bool`. Passing an integer no longer toggles log levels.
- `max_iter` defaults have shifted between releases. If your agent silently stops looping after the first tool call, set `max_iter` explicitly.
### `Crew` parameters
```python
from crewai import Crew, Process
crew = Crew(
agents=[...],
tasks=[...],
process=Process.sequential, # or Process.hierarchical
memory=True,
cache=True,
embedder={"provider": "openai", "config": {"model": "text-embedding-3-small"}},
)
```
- `process=Process.hierarchical` requires either `manager_llm=` or `manager_agent=`. Without one, kickoff raises at validation time.
- `memory=True` with a non-default embedding provider needs an `embedder` dict — see [Memory & embedder config](#memory-embedder-config) below.
### `Task` structured output
Use `output_pydantic`, `output_json`, or `output_file` to coerce a task's result into a typed shape:
```python
from pydantic import BaseModel
from crewai import Task
class Article(BaseModel):
title: str
body: str
write = Task(
description="Write an article about {topic}",
expected_output="A short article with a title and body",
agent=writer,
output_pydantic=Article, # the class, NOT an instance
output_file="output/article.md",
)
```
`output_pydantic` takes the **class** itself. Passing `Article(title="", body="")` is a common mistake and fails with a confusing validation error.
### Memory & embedder config
If `memory=True` and you're not using the default OpenAI embeddings, you must pass an `embedder`:
```python
crew = Crew(
agents=[...],
tasks=[...],
memory=True,
embedder={
"provider": "ollama",
"config": {"model": "nomic-embed-text"},
},
)
```
Set the relevant provider credentials (`OPENAI_API_KEY`, `OLLAMA_HOST`, etc.) in your `.env` file. Memory storage paths are project-local by default — delete the project's memory directory if you change embedders, since dimensions don't mix.
---
## Migrating a Crew to a Flow
`Crew` is the right primitive when you have a single team of agents executing one workflow. Once you need branching, multiple crews, or persistent state across runs, reach for `Flow`.
### When to use Flows vs standalone Crews
| Situation | Use |
| --- | --- |
| Single team, single linear/hierarchical workflow | `Crew` |
| Conditional branches, retries, routing on results | `Flow` |
| Multiple specialized crews chained together | `Flow` |
| State that must persist between steps or runs | `Flow` (with checkpointing) |
| You want typed, IDE-friendly state | `Flow[MyState]` with a Pydantic model |
If you only need one of: branching, multi-crew, or persistent state — start with a `Flow`. The boilerplate is small and you won't have to rewrite later.
### Step-by-step migration
**Before — standalone crew:**
```python
from crewai import Crew
crew = Crew(agents=[researcher, writer], tasks=[research_task, write_task])
result = crew.kickoff(inputs={"topic": "vector databases"})
print(result)
```
**After — crew inside a typed Flow:**
```python
from crewai.flow.flow import Flow, start, listen
from pydantic import BaseModel
class MyState(BaseModel):
input_data: str = ""
result: str = ""
class MyFlow(Flow[MyState]):
@start()
def run_crew(self):
result = MyCrew().crew().kickoff(inputs={"topic": self.state.input_data})
self.state.result = str(result)
return self.state.result
flow = MyFlow()
flow.kickoff(inputs={"input_data": "vector databases"})
```
What changed:
1. The crew is constructed inside a method, not at module load.
2. Inputs flow through `self.state` instead of being threaded as kwargs.
3. The entry point is marked with `@start()`. Subsequent steps use `@listen(run_crew)` to chain.
### Structured state setup
Prefer typed state (`Flow[MyState]`) over the untyped dict variant. You get autocompletion, validation at the boundary, and serializable state for checkpointing:
```python
from pydantic import BaseModel, Field
class ResearchState(BaseModel):
topic: str = ""
sources: list[str] = Field(default_factory=list)
draft: str = ""
final: str = ""
```
Untyped state (`Flow()` with no generic) still works, but you lose static checks and checkpointing fidelity.
### Multi-crew Flow pattern
Chaining two crews — research, then writing — is the canonical reason to adopt Flows:
```python
from crewai.flow.flow import Flow, start, listen, router
from pydantic import BaseModel
class PipelineState(BaseModel):
topic: str = ""
research: str = ""
article: str = ""
class ContentPipeline(Flow[PipelineState]):
@start()
def research(self):
out = ResearchCrew().crew().kickoff(inputs={"topic": self.state.topic})
self.state.research = str(out)
return self.state.research
@router(research)
def gate(self):
return "write" if len(self.state.research) > 200 else "abort"
@listen("write")
def write(self):
out = WritingCrew().crew().kickoff(
inputs={"topic": self.state.topic, "notes": self.state.research}
)
self.state.article = str(out)
return self.state.article
@listen("abort")
def bail(self):
self.state.article = "Insufficient research."
return self.state.article
ContentPipeline().kickoff(inputs={"topic": "vector databases"})
```
`@start()`, `@listen()`, and `@router()` are the three decorators you'll use 95% of the time. See [Flows](/en/concepts/flows) for the full reference.
---
## Common Gotchas
1. **Running `crewai install` and expecting an upgrade.** `crewai install` syncs against the existing `uv.lock`. To bump versions, run `uv add "crewai[tools]>=X.Y.Z"` first, then `crewai install`.
2. **The constraint is a floor, not a pin.** `crewai>=1.11.1` means "any version at or above 1.11.1." `uv` only re-resolves when you explicitly run `uv add` or `uv lock --upgrade-package crewai`.
3. **Extras dropped during re-lock.** If you run `uv add "crewai>=1.14.4"` without extras, `uv` may drop `[tools]` from the resolved set. Always include the extras you need: `uv add "crewai[tools]>=1.14.4"`.
4. **Forgetting to commit `uv.lock`.** After bumping with `uv add`, commit the updated `uv.lock` so teammates get the same versions.
5. **`pip install` instead of `uv tool install`.** Mixing pip-installed and uv-installed `crewai` leads to two binaries on `PATH` and confusing version skew. Pick one — the supported one is `uv`.
6. **Passing a Pydantic instance to `output_pydantic`.** It expects the class. `output_pydantic=Article`, not `output_pydantic=Article(...)`.
7. **Hierarchical process with no manager.** `process=Process.hierarchical` requires `manager_llm=` or `manager_agent=`.
8. **Memory enabled with the wrong embedder.** Switching embedders without clearing the on-disk memory directory causes dimension mismatches. Delete the project's memory store after changing providers.
9. **Dict state when you wanted typed state.** `Flow()` with no generic gives you a dict. For type checking and clean checkpointing, use `Flow[MyState]` with a `BaseModel`.
10. **Stale tool imports.** `from crewai_tools import BaseTool` works in some versions but is not the canonical path. Standardize on `from crewai.tools import BaseTool, tool`.
11. **Python version drift.** CrewAI requires `>=3.10, <3.14`. `uv` will happily build a venv against 3.14+ if it's the default; pin the Python version in `pyproject.toml`.
12. **`verbose=2` and similar integer flags.** `verbose` is a `bool`. Use event listeners for finer-grained logging.
13. **Calling `crew.kickoff()` from inside a Flow without wrapping in `inputs={}`.** Flows pass state, not kwargs. The crew still expects `inputs={...}`.
---
## Checkpointing
Checkpointing is a newer addition that persists agent, crew, and flow state between runs. It lets long-running workflows resume after a crash, a manual stop, or a deploy.
```python
crew = Crew(
agents=[...],
tasks=[...],
checkpoint=True,
)
```
The same flag is supported on `Flow` and `Agent`. State is written to the project's local store and replayed on the next `kickoff()` with the same identifier.
<Note>
Checkpointing is in early release. APIs around resume semantics, storage backends, and identifiers may still shift between minor versions — pin your `crewai` version if you depend on it in production.
</Note>
See [Checkpointing](/en/concepts/checkpointing) for the full feature reference.
---
## Getting Help
- **Changelog** — every breaking change is noted in the [release notes](/en/changelog).
- **GitHub Issues** — open one at [github.com/crewAIInc/crewAI/issues](https://github.com/crewAIInc/crewAI/issues) with a minimal repro and your `crewai --version` output.
- **Discord** — the CrewAI community Discord is the fastest path to debugging help: [community.crewai.com](https://community.crewai.com).
- **Migration guides** — if you're moving from another framework, start at [Migrating from LangGraph](/en/guides/migration/migrating-from-langgraph).

View File

@@ -106,6 +106,9 @@ If you haven't installed `uv` yet, follow **step 1** to quickly get it set up on
```shell
uv tool install crewai --upgrade
```
<Note>
This upgrades the **global `crewai` CLI tool** only. To upgrade the `crewai` version inside your project's virtual environment, see [Upgrading CrewAI in a project](/en/guides/migration/upgrading-crewai).
</Note>
<Check>Installation successful! You're ready to create your first crew! 🎉</Check>
</Step>

View File

@@ -0,0 +1,359 @@
---
title: "CrewAI 업그레이드 및 마이그레이션"
description: "CrewAI 업그레이드 방법, 브레이킹 체인지 처리, Crew에서 Flow로 마이그레이션하는 방법."
icon: "arrow-up-circle"
---
## 개요
CrewAI는 빠르게 발전합니다. 새로운 릴리스에서는 import 경로가 정비되고, `Agent`, `Crew`, `Task`의 기본값이 변경되며, `Flow`와 checkpointing 같은 새로운 오케스트레이션 프리미티브가 도입됩니다. 이 가이드는 다음에 필요한 실용적인 단계들을 모아둔 것입니다:
- 전역 `crewai` CLI와 프로젝트의 고정된 의존성 업그레이드
- import와 파라미터의 브레이킹 체인지에 적응
- 독립 실행형 `Crew`를 타입이 지정된 `Flow`로 마이그레이션
- 업그레이드된 프로젝트를 처음 다시 실행할 때 나타나는 함정 피하기
새로 시작한다면 [설치](/ko/installation)를 참고하세요. 다른 프레임워크에서 옮겨오는 경우라면 [LangGraph에서 마이그레이션](/ko/guides/migration/migrating-from-langgraph)을 참고하세요.
---
## 업그레이드할 수 있는 두 가지
CrewAI는 사용자의 머신에 두 곳에 존재하며, 각각 독립적으로 업그레이드됩니다:
| 무엇 | 설치 방법 | 업그레이드 방법 |
|---|---|---|
| **전역 `crewai` CLI** | `uv tool install crewai` | `uv tool install crewai --upgrade` |
| **프로젝트 venv** (코드가 실행되는 곳) | `crewai install` / `uv sync` | `uv add "crewai[...]>=X.Y.Z"` 후 `crewai install` |
이 둘은 동기화가 어긋날 수 있으며, 실제로도 자주 어긋납니다. `crewai --version`은 CLI 버전을 알려줍니다. 프로젝트 안에서 `uv pip show crewai`를 실행하면 venv 버전을 알려줍니다. 둘이 다른 것은 정상이며, 실행 중인 코드에 중요한 것은 venv 버전입니다.
## 왜 `crewai install`만으로는 업그레이드되지 않는가
`crewai install`은 `uv sync`를 감싼 얇은 래퍼입니다. 현재 `uv.lock` 파일이 지시하는 것 그대로를 설치할 뿐이며 — 어떤 버전 제약도 올리지 **않습니다**.
`pyproject.toml`이 `crewai>=1.11.1`이라 적혀 있고 lock 파일이 `1.11.1`로 해소되었다면, `crewai install`을 실행해도 `1.14.4`가 사용 가능하더라도 영원히 `1.11.1`에 머무릅니다.
실제로 업그레이드하려면 다음을 해야 합니다:
1. `pyproject.toml`의 버전 제약 업데이트
2. lock 파일 재해소
3. venv 동기화
`uv add`는 이 세 가지를 한 번에 처리합니다.
## 프로젝트 업그레이드 방법
```bash
# 제약을 올리고 lock을 다시 만드는 한 번의 명령
uv add "crewai[tools]>=1.14.4"
# venv 동기화 (crewai install은 내부적으로 uv sync를 호출)
crewai install
# 확인
uv pip show crewai
# → Version: 1.14.4
```
`[tools]`를 프로젝트에서 사용하는 extras로 바꾸세요 (예: `[tools,anthropic]`). 잘 모르겠다면 `pyproject.toml`의 `dependencies` 목록을 확인하세요.
<Note>
`uv add`는 `pyproject.toml`과 `uv.lock`을 **둘 다** 원자적으로 업데이트합니다. `pyproject.toml`을 수동으로 편집하는 경우, `crewai install`이 새 버전을 가져가도록 하기 전에 `uv lock --upgrade-package crewai`를 실행해 lock 파일을 다시 해소해야 합니다.
</Note>
## 전역 CLI 업그레이드
전역 CLI는 프로젝트와 분리되어 있습니다. 다음 명령으로 업그레이드하세요:
```bash
uv tool install crewai --upgrade
```
업그레이드 후 셸이 `PATH`에 대해 경고하면 새로고침하세요:
```bash
uv tool update-shell
```
이 명령은 프로젝트의 venv를 **건드리지 않습니다** — 프로젝트 내부에서는 여전히 `uv add` + `crewai install`이 필요합니다.
## 둘이 동기화되었는지 확인
```bash
# 전역 CLI 버전
crewai --version
# 프로젝트 venv 버전
uv pip show crewai | grep Version
```
둘이 일치할 필요는 없지만 — 런타임 동작에 중요한 것은 프로젝트 venv 버전입니다.
<Note>
CrewAI는 `Python >=3.10, <3.14`를 요구합니다. `uv`가 더 오래된 인터프리터로 설치되어 있다면, `crewai install`을 실행하기 전에 지원되는 Python으로 프로젝트 venv를 다시 만드세요.
</Note>
---
## 브레이킹 체인지 및 마이그레이션 노트
대부분의 업그레이드는 작은 조정만 필요합니다. 아래 항목들은 조용히 깨지거나 헷갈리는 트레이스백을 내는 영역들입니다.
### Import 경로: tools와 `BaseTool`
tools의 정식 import 위치는 `crewai.tools`입니다. 옛 경로들이 아직 튜토리얼에 등장하지만 업데이트해야 합니다.
```python
# 이전
from crewai_tools import BaseTool
from crewai.agents.tools import tool
# 이후
from crewai.tools import BaseTool, tool
```
`@tool` 데코레이터와 `BaseTool` 서브클래스는 모두 `crewai.tools`에 있습니다. `AgentFinish` 등 내부 에이전트 심볼들은 더 이상 공개 표면이 아닙니다 — import 중이었다면 event listener나 `Task` 콜백으로 전환하세요.
### `Agent` 파라미터 변경
```python
from crewai import Agent
agent = Agent(
role="Researcher",
goal="Find authoritative sources on {topic}",
backstory="You are a careful, source-driven researcher.",
llm="gpt-4o-mini", # 모델명 문자열 또는 LLM 객체
verbose=True, # 정수 레벨이 아닌 bool
max_iter=15, # 버전마다 기본값이 바뀌었음 — 명시적으로 지정
allow_delegation=False,
)
```
- `llm`은 문자열 모델명(설정된 provider를 통해 해소)이나 세밀한 제어를 위한 `LLM` 객체를 받습니다.
- `verbose`는 일반 `bool`입니다. 정수를 전달해도 더 이상 로그 레벨을 토글하지 않습니다.
- `max_iter`의 기본값은 릴리스 사이에 변경되었습니다. 첫 tool 호출 후 에이전트가 조용히 반복을 멈춘다면 `max_iter`를 명시적으로 지정하세요.
### `Crew` 파라미터
```python
from crewai import Crew, Process
crew = Crew(
agents=[...],
tasks=[...],
process=Process.sequential, # 또는 Process.hierarchical
memory=True,
cache=True,
embedder={"provider": "openai", "config": {"model": "text-embedding-3-small"}},
)
```
- `process=Process.hierarchical`은 `manager_llm=` 또는 `manager_agent=` 중 하나가 필요합니다. 둘 다 없으면 kickoff 시 검증 단계에서 오류가 발생합니다.
- 기본이 아닌 임베딩 provider와 함께 `memory=True`를 쓰려면 `embedder` dict가 필요합니다 — 아래의 [메모리와 embedder 설정](#memory-embedder-config)을 참고하세요.
### `Task` 구조화된 출력
`output_pydantic`, `output_json`, 또는 `output_file`을 사용해 task 결과를 타입이 지정된 형태로 강제할 수 있습니다:
```python
from pydantic import BaseModel
from crewai import Task
class Article(BaseModel):
title: str
body: str
write = Task(
description="Write an article about {topic}",
expected_output="A short article with a title and body",
agent=writer,
output_pydantic=Article, # 인스턴스가 아닌 클래스
output_file="output/article.md",
)
```
`output_pydantic`은 **클래스** 자체를 받습니다. `Article(title="", body="")`을 전달하는 것은 흔한 실수이며 헷갈리는 검증 오류로 실패합니다.
### 메모리와 embedder 설정
`memory=True`이고 OpenAI의 기본 임베딩을 사용하지 않는다면, `embedder`를 반드시 전달해야 합니다:
```python
crew = Crew(
agents=[...],
tasks=[...],
memory=True,
embedder={
"provider": "ollama",
"config": {"model": "nomic-embed-text"},
},
)
```
해당 provider의 자격 증명(`OPENAI_API_KEY`, `OLLAMA_HOST` 등)을 `.env` 파일에 설정하세요. 메모리 저장 경로는 기본적으로 프로젝트-로컬입니다 — embedder를 바꾸면 차원이 호환되지 않으므로 프로젝트의 메모리 디렉터리를 삭제하세요.
---
## Crew를 Flow로 마이그레이션
`Crew`는 단일 에이전트 팀이 하나의 워크플로우를 실행할 때 적합한 프리미티브입니다. 분기, 여러 crew, 또는 실행 간 영속 상태가 필요해지면 `Flow`로 넘어가세요.
### Flow 대 독립 Crew, 언제 무엇을 쓰나
| 상황 | 사용 |
| --- | --- |
| 단일 팀, 단일 선형/계층적 워크플로우 | `Crew` |
| 조건부 분기, 재시도, 결과 기반 라우팅 | `Flow` |
| 여러 전문 crew를 체인으로 연결 | `Flow` |
| 단계나 실행 사이에 유지되어야 하는 상태 | `Flow` (checkpointing 포함) |
| 타입이 지정된 IDE-친화적 상태가 필요 | Pydantic 모델과 함께 `Flow[MyState]` |
분기, 멀티-crew, 영속 상태 중 단 하나라도 필요하다면 — `Flow`로 시작하세요. 보일러플레이트는 적고 나중에 다시 작성할 필요가 없습니다.
### 단계별 마이그레이션
**이전 — 독립 crew:**
```python
from crewai import Crew
crew = Crew(agents=[researcher, writer], tasks=[research_task, write_task])
result = crew.kickoff(inputs={"topic": "vector databases"})
print(result)
```
**이후 — 타입이 지정된 Flow 안의 crew:**
```python
from crewai.flow.flow import Flow, start, listen
from pydantic import BaseModel
class MyState(BaseModel):
input_data: str = ""
result: str = ""
class MyFlow(Flow[MyState]):
@start()
def run_crew(self):
result = MyCrew().crew().kickoff(inputs={"topic": self.state.input_data})
self.state.result = str(result)
return self.state.result
flow = MyFlow()
flow.kickoff(inputs={"input_data": "vector databases"})
```
달라진 점:
1. crew는 모듈 로드 시점이 아니라 메서드 안에서 생성됩니다.
2. 입력은 kwargs로 넘기는 대신 `self.state`를 통해 흐릅니다.
3. 진입점은 `@start()`로 표시됩니다. 이후 단계는 `@listen(run_crew)`로 체인합니다.
### 구조화된 상태 설정
dict 무타입 변형보다 타입이 지정된 상태(`Flow[MyState]`)를 선호하세요. 자동완성, 경계에서의 검증, checkpointing을 위한 직렬화 가능한 상태를 얻을 수 있습니다:
```python
from pydantic import BaseModel, Field
class ResearchState(BaseModel):
topic: str = ""
sources: list[str] = Field(default_factory=list)
draft: str = ""
final: str = ""
```
타입이 지정되지 않은 상태(제네릭 없는 `Flow()`)도 여전히 동작하지만, 정적 검사와 checkpointing 충실도를 잃게 됩니다.
### 멀티-crew Flow 패턴
두 crew를 — 조사 후 작성으로 — 체인으로 연결하는 것이 Flow를 도입하는 가장 전형적인 이유입니다:
```python
from crewai.flow.flow import Flow, start, listen, router
from pydantic import BaseModel
class PipelineState(BaseModel):
topic: str = ""
research: str = ""
article: str = ""
class ContentPipeline(Flow[PipelineState]):
@start()
def research(self):
out = ResearchCrew().crew().kickoff(inputs={"topic": self.state.topic})
self.state.research = str(out)
return self.state.research
@router(research)
def gate(self):
return "write" if len(self.state.research) > 200 else "abort"
@listen("write")
def write(self):
out = WritingCrew().crew().kickoff(
inputs={"topic": self.state.topic, "notes": self.state.research}
)
self.state.article = str(out)
return self.state.article
@listen("abort")
def bail(self):
self.state.article = "Insufficient research."
return self.state.article
ContentPipeline().kickoff(inputs={"topic": "vector databases"})
```
`@start()`, `@listen()`, `@router()`는 95%의 시간 동안 사용하게 될 세 가지 데코레이터입니다. 전체 레퍼런스는 [Flows](/ko/concepts/flows)를 참고하세요.
---
## 흔한 함정
1. **`crewai install`을 실행하고 업그레이드를 기대하기.** `crewai install`은 기존 `uv.lock`에 맞춰 동기화합니다. 버전을 올리려면 먼저 `uv add "crewai[tools]>=X.Y.Z"`를 실행하고 그다음 `crewai install`을 실행하세요.
2. **제약은 하한이지 핀이 아닙니다.** `crewai>=1.11.1`은 "1.11.1 이상의 어떤 버전이든"을 의미합니다. `uv`는 `uv add` 또는 `uv lock --upgrade-package crewai`를 명시적으로 실행할 때만 재해소합니다.
3. **재-락 중에 extras가 누락됨.** `uv add "crewai>=1.14.4"`를 extras 없이 실행하면 `uv`가 해소된 집합에서 `[tools]`를 제외할 수 있습니다. 필요한 extras는 항상 포함하세요: `uv add "crewai[tools]>=1.14.4"`.
4. **`uv.lock`을 commit하는 것을 잊기.** `uv add`로 버전을 올린 후, 팀원들이 같은 버전을 받을 수 있도록 업데이트된 `uv.lock`을 commit하세요.
5. **`uv tool install` 대신 `pip install`.** pip로 설치한 `crewai`와 uv로 설치한 것을 섞으면 `PATH`에 두 개의 바이너리가 생기고 헷갈리는 버전 차이가 생깁니다. 하나를 고르세요 — 지원되는 것은 `uv`입니다.
6. **`output_pydantic`에 Pydantic 인스턴스를 전달.** 클래스를 기대합니다. `output_pydantic=Article`이지 `output_pydantic=Article(...)`가 아닙니다.
7. **manager 없이 hierarchical 프로세스.** `process=Process.hierarchical`은 `manager_llm=` 또는 `manager_agent=`를 요구합니다.
8. **잘못된 embedder로 메모리 활성화.** 디스크의 메모리 디렉터리를 비우지 않고 embedder를 바꾸면 차원 불일치가 발생합니다. provider를 변경한 후 프로젝트의 메모리 저장소를 삭제하세요.
9. **타입이 지정된 상태를 원했는데 dict 상태가 됨.** 제네릭이 없는 `Flow()`는 dict를 줍니다. 타입 검사와 깨끗한 checkpointing을 위해서는 `Flow[MyState]`와 `BaseModel`을 사용하세요.
10. **오래된 tool import.** `from crewai_tools import BaseTool`은 일부 버전에서 동작하지만 정식 경로가 아닙니다. `from crewai.tools import BaseTool, tool`로 표준화하세요.
11. **Python 버전 드리프트.** CrewAI는 `>=3.10, <3.14`를 요구합니다. 기본 인터프리터가 3.14+라면 `uv`는 주저 없이 그 버전으로 venv를 빌드합니다. `pyproject.toml`에서 Python 버전을 고정하세요.
12. **`verbose=2`와 같은 정수 플래그.** `verbose`는 `bool`입니다. 더 세밀한 로깅에는 event listener를 사용하세요.
13. **Flow 안에서 `inputs={}` 없이 `crew.kickoff()` 호출.** Flow는 kwargs가 아닌 state를 전달합니다. crew는 여전히 `inputs={...}`를 기대합니다.
---
## Checkpointing
Checkpointing은 실행 사이에 agent, crew, flow 상태를 영속화하는 비교적 새로운 기능입니다. 장시간 실행되는 워크플로우가 크래시, 수동 중지, 또는 배포 이후에 재개될 수 있게 해줍니다.
```python
crew = Crew(
agents=[...],
tasks=[...],
checkpoint=True,
)
```
같은 플래그가 `Flow`와 `Agent`에서도 지원됩니다. 상태는 프로젝트의 로컬 저장소에 기록되고 동일한 식별자로 다음 `kickoff()` 시 재생됩니다.
<Note>
Checkpointing은 초기 릴리스 단계입니다. 재개 의미론, 저장 백엔드, 식별자에 관한 API는 마이너 버전 사이에서도 변경될 수 있습니다 — 프로덕션에서 의존한다면 `crewai` 버전을 핀하세요.
</Note>
전체 기능 레퍼런스는 [Checkpointing](/ko/concepts/checkpointing)을 참고하세요.
---
## 도움 받기
- **체인지로그** — 모든 브레이킹 체인지는 [릴리스 노트](/ko/changelog)에 기록됩니다.
- **GitHub Issues** — 최소 재현 코드와 `crewai --version` 출력과 함께 [github.com/crewAIInc/crewAI/issues](https://github.com/crewAIInc/crewAI/issues)에 이슈를 열어주세요.
- **Discord** — CrewAI 커뮤니티 Discord는 디버깅 도움을 가장 빠르게 받을 수 있는 경로입니다: [community.crewai.com](https://community.crewai.com).
- **마이그레이션 가이드** — 다른 프레임워크에서 옮겨오는 경우 [LangGraph에서 마이그레이션](/ko/guides/migration/migrating-from-langgraph)부터 시작하세요.

View File

@@ -0,0 +1,359 @@
---
title: "Atualizando e Migrando o CrewAI"
description: "Como atualizar o CrewAI, lidar com breaking changes e migrar Crews para Flows."
icon: "arrow-up-circle"
---
## Visão Geral
O CrewAI evolui rapidamente. Novas versões frequentemente ajustam caminhos de import, alteram defaults de `Agent`, `Crew` e `Task`, e introduzem novas primitivas de orquestração como `Flow` e checkpointing. Este guia reúne os passos práticos necessários para:
- Atualizar a CLI global `crewai` e a dependência fixada do seu projeto
- Adaptar-se a breaking changes em imports e parâmetros
- Migrar uma `Crew` independente para um `Flow` tipado
- Evitar as armadilhas que aparecem na primeira execução de um projeto atualizado
Se você está começando do zero, veja [Instalação](/pt-BR/installation). Se está vindo de outro framework, veja [Migrando do LangGraph](/pt-BR/guides/migration/migrating-from-langgraph).
---
## As Duas Coisas Que Você Pode Querer Atualizar
O CrewAI vive em dois lugares na sua máquina, e cada um se atualiza de forma independente:
| O quê | Como é instalado | Como atualizar |
|---|---|---|
| A **CLI global `crewai`** | `uv tool install crewai` | `uv tool install crewai --upgrade` |
| O **venv do projeto** (onde seu código roda) | `crewai install` / `uv sync` | `uv add "crewai[...]>=X.Y.Z"` e depois `crewai install` |
Esses dois podem — e frequentemente ficam — fora de sincronia. Rodar `crewai --version` mostra a versão da CLI. Rodar `uv pip show crewai` dentro do seu projeto mostra a versão do venv. Se forem diferentes, isso é normal; o que importa para o código em execução é a versão do venv.
## Por Que `crewai install` Sozinho Não Atualiza
`crewai install` é um wrapper fino em torno de `uv sync`. Ele instala exatamente o que o arquivo `uv.lock` atual diz — ele **não** muda nenhuma restrição de versão.
Se seu `pyproject.toml` diz `crewai>=1.11.1` e o lock file resolveu para `1.11.1`, executar `crewai install` vai te manter em `1.11.1` para sempre, mesmo que `1.14.4` esteja disponível.
Para realmente atualizar, você precisa:
1. Atualizar a restrição de versão em `pyproject.toml`
2. Re-resolver o lock file
3. Sincronizar o venv
`uv add` faz os três de uma vez só.
## Como Atualizar Seu Projeto
```bash
# Aumenta a restrição e re-resolve o lock em um único comando
uv add "crewai[tools]>=1.14.4"
# Sincroniza o venv (crewai install chama uv sync por baixo dos panos)
crewai install
# Verifica
uv pip show crewai
# → Version: 1.14.4
```
Substitua `[tools]` por quaisquer extras que seu projeto utilize (ex.: `[tools,anthropic]`). Verifique a lista de `dependencies` do seu `pyproject.toml` se estiver em dúvida.
<Note>
`uv add` atualiza tanto `pyproject.toml` **quanto** `uv.lock` atomicamente. Se você editar `pyproject.toml` manualmente, ainda precisa rodar `uv lock --upgrade-package crewai` para re-resolver o lock file antes que `crewai install` pegue a nova versão.
</Note>
## Atualizando a CLI Global
A CLI global é separada do seu projeto. Atualize com:
```bash
uv tool install crewai --upgrade
```
Se seu shell avisar sobre o `PATH` após a atualização, recarregue-o:
```bash
uv tool update-shell
```
Isso **não** mexe no venv do seu projeto — você ainda precisa de `uv add` + `crewai install` dentro do projeto.
## Verifique Se Ambos Estão em Sincronia
```bash
# Versão da CLI global
crewai --version
# Versão do venv do projeto
uv pip show crewai | grep Version
```
Eles não precisam coincidir — mas a versão do venv do projeto é o que importa para o comportamento em runtime.
<Note>
CrewAI requer `Python >=3.10, <3.14`. Se o `uv` foi instalado contra um interpretador mais antigo, recrie o venv do projeto com uma versão suportada do Python antes de rodar `crewai install`.
</Note>
---
## Breaking Changes e Notas de Migração
A maioria das atualizações requer apenas pequenos ajustes. As áreas abaixo são as que quebram silenciosamente ou com tracebacks confusos.
### Caminhos de import: tools e `BaseTool`
O caminho canônico para tools é `crewai.tools`. Caminhos antigos ainda aparecem em tutoriais, mas devem ser atualizados.
```python
# Antes
from crewai_tools import BaseTool
from crewai.agents.tools import tool
# Depois
from crewai.tools import BaseTool, tool
```
O decorador `@tool` e a subclasse `BaseTool` ambos vivem em `crewai.tools`. `AgentFinish` e outros símbolos internos do agente não fazem mais parte da superfície pública — se você os estava importando, mude para event listeners ou callbacks de `Task`.
### Mudanças de parâmetros em `Agent`
```python
from crewai import Agent
agent = Agent(
role="Researcher",
goal="Find authoritative sources on {topic}",
backstory="You are a careful, source-driven researcher.",
llm="gpt-4o-mini", # nome do modelo como string OU um objeto LLM
verbose=True, # bool, não um nível inteiro
max_iter=15, # default mudou entre versões — defina explicitamente
allow_delegation=False,
)
```
- `llm` aceita tanto um nome de modelo como string (resolvido pelo provedor configurado) quanto um objeto `LLM` para controle granular.
- `verbose` é um `bool` puro. Passar um inteiro não alterna mais níveis de log.
- Os defaults de `max_iter` mudaram entre releases. Se seu agente para silenciosamente de iterar após a primeira chamada de tool, defina `max_iter` explicitamente.
### Parâmetros de `Crew`
```python
from crewai import Crew, Process
crew = Crew(
agents=[...],
tasks=[...],
process=Process.sequential, # ou Process.hierarchical
memory=True,
cache=True,
embedder={"provider": "openai", "config": {"model": "text-embedding-3-small"}},
)
```
- `process=Process.hierarchical` requer ou `manager_llm=` ou `manager_agent=`. Sem um deles, o kickoff lança erro na validação.
- `memory=True` com um provedor de embedding não-default precisa de um dicionário `embedder` — veja [Configuração de memória e embedder](#memory-embedder-config) abaixo.
### Saída estruturada de `Task`
Use `output_pydantic`, `output_json` ou `output_file` para forçar o resultado de uma task em um formato tipado:
```python
from pydantic import BaseModel
from crewai import Task
class Article(BaseModel):
title: str
body: str
write = Task(
description="Write an article about {topic}",
expected_output="A short article with a title and body",
agent=writer,
output_pydantic=Article, # a classe, NÃO uma instância
output_file="output/article.md",
)
```
`output_pydantic` recebe a **classe** em si. Passar `Article(title="", body="")` é um erro comum e falha com um erro de validação confuso.
### Configuração de memória e embedder
Se `memory=True` e você não está usando os embeddings padrão da OpenAI, é preciso passar um `embedder`:
```python
crew = Crew(
agents=[...],
tasks=[...],
memory=True,
embedder={
"provider": "ollama",
"config": {"model": "nomic-embed-text"},
},
)
```
Defina as credenciais do provedor relevante (`OPENAI_API_KEY`, `OLLAMA_HOST`, etc.) no seu arquivo `.env`. Os caminhos de armazenamento de memória são locais ao projeto por default — apague o diretório de memória do projeto se trocar de embedder, já que dimensões diferentes não se misturam.
---
## Migrando uma Crew para um Flow
`Crew` é a primitiva certa quando você tem um único time de agentes executando um workflow. Quando você precisa de branches, múltiplas crews ou estado persistente entre execuções, parta para `Flow`.
### Quando usar Flows vs Crews independentes
| Situação | Use |
| --- | --- |
| Time único, workflow linear/hierárquico | `Crew` |
| Branches condicionais, retries, roteamento por resultado | `Flow` |
| Múltiplas crews especializadas encadeadas | `Flow` |
| Estado que precisa persistir entre etapas ou execuções | `Flow` (com checkpointing) |
| Você quer estado tipado e amigável à IDE | `Flow[MyState]` com um modelo Pydantic |
Se você precisa de qualquer um destes: branches, multi-crew ou estado persistente — comece com um `Flow`. O boilerplate é pequeno e você não precisará reescrever depois.
### Migração passo a passo
**Antes — crew independente:**
```python
from crewai import Crew
crew = Crew(agents=[researcher, writer], tasks=[research_task, write_task])
result = crew.kickoff(inputs={"topic": "vector databases"})
print(result)
```
**Depois — crew dentro de um Flow tipado:**
```python
from crewai.flow.flow import Flow, start, listen
from pydantic import BaseModel
class MyState(BaseModel):
input_data: str = ""
result: str = ""
class MyFlow(Flow[MyState]):
@start()
def run_crew(self):
result = MyCrew().crew().kickoff(inputs={"topic": self.state.input_data})
self.state.result = str(result)
return self.state.result
flow = MyFlow()
flow.kickoff(inputs={"input_data": "vector databases"})
```
O que mudou:
1. A crew é construída dentro de um método, não no carregamento do módulo.
2. Inputs fluem por `self.state` em vez de serem passados como kwargs.
3. O ponto de entrada é marcado com `@start()`. Etapas seguintes usam `@listen(run_crew)` para encadear.
### Configuração de estado estruturado
Prefira estado tipado (`Flow[MyState]`) em vez da variante de dict não tipado. Você ganha autocompletar, validação na fronteira e estado serializável para checkpointing:
```python
from pydantic import BaseModel, Field
class ResearchState(BaseModel):
topic: str = ""
sources: list[str] = Field(default_factory=list)
draft: str = ""
final: str = ""
```
Estado não tipado (`Flow()` sem genérico) ainda funciona, mas você perde checagens estáticas e fidelidade no checkpointing.
### Padrão de Flow multi-crew
Encadear duas crews — pesquisa, depois escrita — é o motivo canônico para adotar Flows:
```python
from crewai.flow.flow import Flow, start, listen, router
from pydantic import BaseModel
class PipelineState(BaseModel):
topic: str = ""
research: str = ""
article: str = ""
class ContentPipeline(Flow[PipelineState]):
@start()
def research(self):
out = ResearchCrew().crew().kickoff(inputs={"topic": self.state.topic})
self.state.research = str(out)
return self.state.research
@router(research)
def gate(self):
return "write" if len(self.state.research) > 200 else "abort"
@listen("write")
def write(self):
out = WritingCrew().crew().kickoff(
inputs={"topic": self.state.topic, "notes": self.state.research}
)
self.state.article = str(out)
return self.state.article
@listen("abort")
def bail(self):
self.state.article = "Insufficient research."
return self.state.article
ContentPipeline().kickoff(inputs={"topic": "vector databases"})
```
`@start()`, `@listen()` e `@router()` são os três decoradores que você usará 95% do tempo. Veja [Flows](/pt-BR/concepts/flows) para a referência completa.
---
## Armadilhas Comuns
1. **Rodar `crewai install` esperando uma atualização.** `crewai install` sincroniza com base no `uv.lock` existente. Para subir versões, rode `uv add "crewai[tools]>=X.Y.Z"` primeiro e depois `crewai install`.
2. **A restrição é um piso, não um pin.** `crewai>=1.11.1` significa "qualquer versão a partir de 1.11.1". O `uv` só re-resolve quando você executa explicitamente `uv add` ou `uv lock --upgrade-package crewai`.
3. **Extras descartados durante o re-lock.** Se você rodar `uv add "crewai>=1.14.4"` sem extras, o `uv` pode descartar `[tools]` do conjunto resolvido. Sempre inclua os extras de que precisa: `uv add "crewai[tools]>=1.14.4"`.
4. **Esquecer de commitar `uv.lock`.** Após subir a versão com `uv add`, commite o `uv.lock` atualizado para que seus colegas tenham as mesmas versões.
5. **`pip install` em vez de `uv tool install`.** Misturar `crewai` instalado por pip e por uv leva a dois binários no `PATH` e divergência de versões confusa. Escolha um — o suportado é o `uv`.
6. **Passar uma instância Pydantic para `output_pydantic`.** Ele espera a classe. `output_pydantic=Article`, não `output_pydantic=Article(...)`.
7. **Processo hierárquico sem manager.** `process=Process.hierarchical` requer `manager_llm=` ou `manager_agent=`.
8. **Memória ativada com o embedder errado.** Trocar de embedder sem limpar o diretório de memória em disco causa incompatibilidade de dimensões. Apague o store de memória do projeto após mudar de provedor.
9. **Estado de dict quando você queria estado tipado.** `Flow()` sem genérico te dá um dict. Para checagem de tipos e checkpointing limpo, use `Flow[MyState]` com um `BaseModel`.
10. **Imports antigos de tools.** `from crewai_tools import BaseTool` funciona em algumas versões, mas não é o caminho canônico. Padronize com `from crewai.tools import BaseTool, tool`.
11. **Drift de versão do Python.** O CrewAI requer `>=3.10, <3.14`. O `uv` vai construir sem hesitar um venv contra 3.14+ se essa for a versão default; fixe a versão do Python no `pyproject.toml`.
12. **`verbose=2` e flags inteiras semelhantes.** `verbose` é um `bool`. Use event listeners para logging mais granular.
13. **Chamar `crew.kickoff()` de dentro de um Flow sem encapsular em `inputs={}`.** Flows passam estado, não kwargs. A crew ainda espera `inputs={...}`.
---
## Checkpointing
Checkpointing é uma adição mais recente que persiste o estado de agent, crew e flow entre execuções. Permite que workflows de longa duração retomem após um crash, uma parada manual ou um deploy.
```python
crew = Crew(
agents=[...],
tasks=[...],
checkpoint=True,
)
```
A mesma flag é suportada em `Flow` e `Agent`. O estado é gravado no store local do projeto e reproduzido na próxima `kickoff()` com o mesmo identificador.
<Note>
Checkpointing está em release inicial. APIs em torno de semântica de retomada, backends de storage e identificadores ainda podem mudar entre versões menores — fixe a versão do `crewai` se você depende disso em produção.
</Note>
Veja [Checkpointing](/pt-BR/concepts/checkpointing) para a referência completa do recurso.
---
## Obtendo Ajuda
- **Changelog** — toda breaking change é registrada nas [release notes](/pt-BR/changelog).
- **GitHub Issues** — abra uma em [github.com/crewAIInc/crewAI/issues](https://github.com/crewAIInc/crewAI/issues) com um repro mínimo e a saída de `crewai --version`.
- **Discord** — o Discord da comunidade CrewAI é o caminho mais rápido para ajuda em debugging: [community.crewai.com](https://community.crewai.com).
- **Guias de migração** — se você está vindo de outro framework, comece em [Migrando do LangGraph](/pt-BR/guides/migration/migrating-from-langgraph).

View File

@@ -132,19 +132,44 @@ PROVIDERS: list[str] = [
MODELS: dict[str, list[str]] = {
"openai": [
"gpt-4",
"gpt-5.5",
"gpt-5.5-pro",
"gpt-5.4",
"gpt-5.4-pro",
"gpt-5.4-mini",
"gpt-5.4-nano",
"gpt-5.2",
"gpt-5.2-pro",
"gpt-5.1",
"gpt-5",
"gpt-5-pro",
"gpt-5-mini",
"gpt-5-nano",
"gpt-4.1",
"gpt-4.1-mini-2025-04-14",
"gpt-4.1-nano-2025-04-14",
"gpt-4.1-mini",
"gpt-4.1-nano",
"gpt-4o",
"gpt-4o-mini",
"o4-mini",
"o3",
"o3-mini",
"o1",
"o1-mini",
"o1-preview",
"gpt-4",
"gpt-4.1-mini-2025-04-14",
"gpt-4.1-nano-2025-04-14",
],
"anthropic": [
"claude-opus-4-6",
"claude-sonnet-4-6",
"claude-haiku-4-5-20251001",
"claude-3-7-sonnet-20250219",
"claude-3-5-sonnet-20241022",
"claude-3-5-haiku-20241022",
"claude-3-5-sonnet-20240620",
"claude-3-sonnet-20240229",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307",
],
"gemini": [

View File

@@ -105,7 +105,7 @@ a2a = [
"aiocache[redis,memcached]~=0.12.3",
]
file-processing = [
"crewai-files",
"crewai-files==1.14.5a3",
]
qdrant-edge = [
"qdrant-edge-py>=0.6.0",

View File

@@ -170,10 +170,7 @@ info = "Commits must follow Conventional Commits 1.0.0."
[tool.uv]
# Pinned to include the security patch releases (authlib 1.6.11,
# langchain-text-splitters 1.1.2) uploaded on 2026-04-16, and the
# litellm 1.83.7+ SSTI fix (GHSA-xqmj-j6mv-4862) uploaded on 2026-04-13.
exclude-newer = "2026-04-27"
exclude-newer = "3 days"
# composio-core pins rich<14 but textual requires rich>=14.
# onnxruntime 1.24+ dropped Python 3.10 wheels; cap it so qdrant[fastembed] resolves on 3.10.
@@ -184,7 +181,8 @@ exclude-newer = "2026-04-27"
# cryptography 46.0.6 has CVE-2026-39892; force 46.0.7+.
# pypdf <6.10.2 has GHSA-4pxv-j86v-mhcw, GHSA-7gw9-cf7v-778f, GHSA-x284-j5p8-9c5p; force 6.10.2+.
# uv <0.11.6 has GHSA-pjjw-68hj-v9mw; force 0.11.6+.
# python-multipart <0.0.26 has GHSA-mj87-hwqh-73pj; force 0.0.26+.
# python-multipart <0.0.27 has GHSA-pp6c-gr5w-3c5g (DoS via unbounded multipart headers).
# gitpython <3.1.49 has GHSA-v87r-6q3f-2j67 (newline injection -> RCE via core.hooksPath).
# langsmith <0.7.31 has GHSA-rr7j-v2q5-chgv (streaming token redaction bypass); force 0.7.31+.
# authlib <1.6.11 has GHSA-jj8c-mmj3-mmgv (CSRF bypass in cache-based state storage).
# litellm 1.83.8+ hard-pins openai==2.24.0, missing openai.types.responses used by crewai;
@@ -201,7 +199,8 @@ override-dependencies = [
"cryptography>=46.0.7",
"pypdf>=6.10.2,<7",
"uv>=0.11.6,<1",
"python-multipart>=0.0.26,<1",
"python-multipart>=0.0.27,<1",
"gitpython>=3.1.49,<4",
"langsmith>=0.7.31,<0.8",
"authlib>=1.6.11",
]

18
uv.lock generated
View File

@@ -13,7 +13,8 @@ resolution-markers = [
]
[options]
exclude-newer = "2026-04-27T16:00:00Z"
exclude-newer = "2026-05-04T15:35:41.745265Z"
exclude-newer-span = "P3D"
[manifest]
members = [
@@ -27,6 +28,7 @@ members = [
overrides = [
{ name = "authlib", specifier = ">=1.6.11" },
{ name = "cryptography", specifier = ">=46.0.7" },
{ name = "gitpython", specifier = ">=3.1.49,<4" },
{ name = "langchain-core", specifier = ">=1.2.31,<2" },
{ name = "langchain-text-splitters", specifier = ">=1.1.2,<2" },
{ name = "langsmith", specifier = ">=0.7.31,<0.8" },
@@ -34,7 +36,7 @@ overrides = [
{ name = "openai", specifier = ">=2.30.0,<3" },
{ name = "pillow", specifier = ">=12.1.1" },
{ name = "pypdf", specifier = ">=6.10.2,<7" },
{ name = "python-multipart", specifier = ">=0.0.26,<1" },
{ name = "python-multipart", specifier = ">=0.0.27,<1" },
{ name = "rich", specifier = ">=13.7.1" },
{ name = "transformers", marker = "python_full_version >= '3.10'", specifier = ">=5.4.0" },
{ name = "urllib3", specifier = ">=2.6.3" },
@@ -2698,14 +2700,14 @@ wheels = [
[[package]]
name = "gitpython"
version = "3.1.47"
version = "3.1.49"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "gitdb" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c1/bd/50db468e9b1310529a19fce651b3b0e753b5c07954d486cba31bbee9a5d5/gitpython-3.1.47.tar.gz", hash = "sha256:dba27f922bd2b42cb54c87a8ab3cb6beb6bf07f3d564e21ac848913a05a8a3cd", size = 216978, upload-time = "2026-04-22T02:44:44.059Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e1/63/210aaa302d6a0a78daa67c5c15bbac2cad361722841278b0209b6da20855/gitpython-3.1.49.tar.gz", hash = "sha256:42f9399c9eb33fc581014bedd76049dfbaf6375aa2a5754575966387280315e1", size = 219367, upload-time = "2026-04-29T00:31:20.478Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f2/c5/a1bc0996af85757903cf2bf444a7824e68e0035ce63fb41d6f76f9def68b/gitpython-3.1.47-py3-none-any.whl", hash = "sha256:489f590edfd6d20571b2c0e72c6a6ac6915ee8b8cd04572330e3842207a78905", size = 209547, upload-time = "2026-04-22T02:44:41.271Z" },
{ url = "https://files.pythonhosted.org/packages/fd/6f/b842bfa6f21d6f87c57f9abf7194225e55279d96d869775e19e9f7236fc5/gitpython-3.1.49-py3-none-any.whl", hash = "sha256:024b0422d7f84d15cd794844e029ffebd4c5d42a7eb9b936b458697ef550a02c", size = 212190, upload-time = "2026-04-29T00:31:18.412Z" },
]
[[package]]
@@ -7379,11 +7381,11 @@ wheels = [
[[package]]
name = "python-multipart"
version = "0.0.26"
version = "0.0.27"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/88/71/b145a380824a960ebd60e1014256dbb7d2253f2316ff2d73dfd8928ec2c3/python_multipart-0.0.26.tar.gz", hash = "sha256:08fadc45918cd615e26846437f50c5d6d23304da32c341f289a617127b081f17", size = 43501, upload-time = "2026-04-10T14:09:59.473Z" }
sdist = { url = "https://files.pythonhosted.org/packages/69/9b/f23807317a113dc36e74e75eb265a02dd1a4d9082abc3c1064acd22997c4/python_multipart-0.0.27.tar.gz", hash = "sha256:9870a6a8c5a20a5bf4f07c017bd1489006ff8836cff097b6933355ee2b49b602", size = 44043, upload-time = "2026-04-27T10:51:26.649Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/22/f1925cdda983ab66fc8ec6ec8014b959262747e58bdca26a4e3d1da29d56/python_multipart-0.0.26-py3-none-any.whl", hash = "sha256:c0b169f8c4484c13b0dcf2ef0ec3a4adb255c4b7d18d8e420477d2b1dd03f185", size = 28847, upload-time = "2026-04-10T14:09:58.131Z" },
{ url = "https://files.pythonhosted.org/packages/99/78/4126abcbdbd3c559d43e0db7f7b9173fc6befe45d39a2856cc0b8ec2a5a6/python_multipart-0.0.27-py3-none-any.whl", hash = "sha256:6fccfad17a27334bd0193681b369f476eda3409f17381a2d65aa7df3f7275645", size = 29254, upload-time = "2026-04-27T10:51:24.997Z" },
]
[[package]]