Mirror of https://github.com/crewAIInc/crewAI.git, synced 2026-01-28 17:48:13 +00:00
fix: enhance file store with fallback memory cache when aiocache is not installed (#4283)
* fix: enhance file store with fallback memory cache when aiocache is not installed

  - Added a simple in-memory cache implementation to serve as a fallback when the aiocache library is unavailable.
  - Improved error handling for the aiocache import, ensuring that the application can still function without it.
  - This change enhances the robustness of the file store utility by providing a reliable caching mechanism in various environments.

* drop fallback

* Potential fix for pull request finding 'Unused global variable'

  Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com>

---------

Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com>
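For reference, the "simple in-memory cache" described in the first bullet (and removed again by the "drop fallback" commit, so it does not appear in the final diff) would amount to something like the sketch below. The class name and ttl bookkeeping here are purely illustrative; only the async set/get/delete surface mirrors what the aiocache-backed store expects.

# Illustrative only: a dict-backed stand-in with the same async surface as the
# aiocache memory cache. The PR ultimately dropped this fallback, so nothing
# below exists in the merged code.
import time
from typing import Any


class _FallbackMemoryCache:
    def __init__(self) -> None:
        self._data: dict[str, tuple[Any, float | None]] = {}

    async def set(self, key: str, value: Any, ttl: int | None = None) -> None:
        # Record an absolute expiry time when a ttl is given.
        expires_at = time.monotonic() + ttl if ttl is not None else None
        self._data[key] = (value, expires_at)

    async def get(self, key: str) -> Any | None:
        entry = self._data.get(key)
        if entry is None:
            return None
        value, expires_at = entry
        if expires_at is not None and time.monotonic() > expires_at:
            del self._data[key]
            return None
        return value

    async def delete(self, key: str) -> None:
        self._data.pop(key, None)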
@@ -5,17 +5,29 @@ from __future__ import annotations
 import asyncio
 from collections.abc import Coroutine
 import concurrent.futures
 import logging
 from typing import TYPE_CHECKING, TypeVar
 from uuid import UUID
 
-from aiocache import Cache  # type: ignore[import-untyped]
-from aiocache.serializers import PickleSerializer  # type: ignore[import-untyped]
 
 if TYPE_CHECKING:
+    from aiocache import Cache
     from crewai_files import FileInput
 
-_file_store = Cache(Cache.MEMORY, serializer=PickleSerializer())
 logger = logging.getLogger(__name__)
+
+_file_store: Cache | None = None
+
+try:
+    from aiocache import Cache
+    from aiocache.serializers import PickleSerializer
+
+    _file_store = Cache(Cache.MEMORY, serializer=PickleSerializer())
+except ImportError:
+    logger.debug(
+        "aiocache is not installed. File store features will be disabled. "
+        "Install with: uv add aiocache"
+    )
+
 
 T = TypeVar("T")
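The try/except above only wires up aiocache when it imports cleanly; when it does, _file_store is the library's in-memory backend with pickle serialization. A standalone sketch of how that cache behaves (the key name and ttl value here are made up, not the module's real prefixes):

# Standalone sketch of the aiocache memory cache this module configures.
import asyncio

from aiocache import Cache
from aiocache.serializers import PickleSerializer


async def main() -> None:
    cache = Cache(Cache.MEMORY, serializer=PickleSerializer())

    # Values are pickled, so arbitrary Python objects round-trip.
    await cache.set("files:example", {"report.pdf": b"%PDF..."}, ttl=3600)
    print(await cache.get("files:example"))   # {'report.pdf': b'%PDF...'}

    await cache.delete("files:example")
    print(await cache.get("files:example"))   # None


asyncio.run(main())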
@@ -59,6 +71,8 @@ async def astore_files(
         files: Dictionary mapping names to file inputs.
         ttl: Time-to-live in seconds.
     """
+    if _file_store is None:
+        return
     await _file_store.set(f"{_CREW_PREFIX}{execution_id}", files, ttl=ttl)
 
 
@@ -71,6 +85,8 @@ async def aget_files(execution_id: UUID) -> dict[str, FileInput] | None:
     Returns:
         Dictionary of files or None if not found.
     """
+    if _file_store is None:
+        return None
     result: dict[str, FileInput] | None = await _file_store.get(
         f"{_CREW_PREFIX}{execution_id}"
     )
@@ -83,6 +99,8 @@ async def aclear_files(execution_id: UUID) -> None:
     Args:
         execution_id: Unique identifier for the crew execution.
     """
+    if _file_store is None:
+        return
     await _file_store.delete(f"{_CREW_PREFIX}{execution_id}")
 
 
@@ -98,6 +116,8 @@ async def astore_task_files(
         files: Dictionary mapping names to file inputs.
         ttl: Time-to-live in seconds.
     """
+    if _file_store is None:
+        return
     await _file_store.set(f"{_TASK_PREFIX}{task_id}", files, ttl=ttl)
 
 
@@ -110,6 +130,8 @@ async def aget_task_files(task_id: UUID) -> dict[str, FileInput] | None:
     Returns:
         Dictionary of files or None if not found.
     """
+    if _file_store is None:
+        return None
     result: dict[str, FileInput] | None = await _file_store.get(
         f"{_TASK_PREFIX}{task_id}"
     )
@@ -122,6 +144,8 @@ async def aclear_task_files(task_id: UUID) -> None:
     Args:
         task_id: Unique identifier for the task.
     """
+    if _file_store is None:
+        return
     await _file_store.delete(f"{_TASK_PREFIX}{task_id}")
 
 
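Taken together, the new guards make every helper degrade to a no-op when aiocache is absent: the store/clear functions return early and the getters return None. A caller-side sketch of that behavior follows; the import path is hypothetical (the diff does not show the module's location), and the call shapes are inferred from the hunks above. The task-scoped helpers (astore_task_files, aget_task_files, aclear_task_files) follow the same pattern keyed by task_id.

# Hypothetical caller; the import path below is an assumption, not taken from
# the PR, and real usage passes FileInput objects rather than an empty dict.
import asyncio
from uuid import uuid4

from crewai.files.store import astore_files, aget_files, aclear_files  # hypothetical path


async def main() -> None:
    execution_id = uuid4()
    files = {}  # name -> FileInput in real usage

    # With aiocache installed these round-trip through the memory cache;
    # without it, astore_files/aclear_files return early and aget_files
    # yields None.
    await astore_files(execution_id, files, ttl=3600)
    print(await aget_files(execution_id))
    await aclear_files(execution_id)


asyncio.run(main())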