Brandon/provide llm additional params (#2018)
Some checks failed
Mark stale issues and pull requests / stale (push) Has been cancelled

* Clean up to match enterprise

* add additional params to LLM calls

* make sure additional params are passed through to the LLM

* update docs

* drop leftover debug print
This commit is contained in:
Brandon Hancock (bhancock_ai)
2025-01-31 12:53:58 -05:00
committed by GitHub
parent ddb7958da7
commit 23b9e10323
3 changed files with 63 additions and 1 deletions

View File

@@ -137,6 +137,7 @@ class LLM:
api_version: Optional[str] = None,
api_key: Optional[str] = None,
callbacks: List[Any] = [],
**kwargs,
):
self.model = model
self.timeout = timeout
@@ -158,6 +159,7 @@ class LLM:
self.api_key = api_key
self.callbacks = callbacks
self.context_window_size = 0
self.additional_params = kwargs
litellm.drop_params = True
@@ -240,6 +242,7 @@ class LLM:
"api_key": self.api_key,
"stream": False,
"tools": tools,
**self.additional_params,
}
# Remove None values from params