Make cache_interactor injectable (#321)

* make cache interactor injectable

* version bump

* update lockfile
Chris Trevino 2024-11-18 11:02:27 -08:00 committed by GitHub
Parent a2ab2f64b5
Commit 19c9786a68
No key matching this signature was found
GPG key ID: B5690EEEBB952194
4 changed files with 8 additions and 4 deletions
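
For reference, a minimal sketch of how the new parameter can be used from calling code: construct a CacheInteractor once and inject it into the factories instead of letting each one build its own. Only the factory names, the cache_interactor parameter, and the CacheInteractor(events, cache) constructor are taken from this diff; the import paths, config construction, and cache setup below are assumptions for illustration, not code from this repository.

    from fnllm.caching import CacheInteractor  # assumed import path
    from fnllm.events import LLMEvents  # assumed import path
    from fnllm.openai import (  # assumed import path
        OpenAIConfig,
        create_openai_chat_llm,
        create_openai_embeddings_llm,
    )

    config = OpenAIConfig(...)  # placeholder: real configuration omitted
    events = LLMEvents()
    cache = ...  # any Cache implementation the application already uses

    # Build one shared interactor (constructor arguments as shown in the diff)
    # and inject it into both factories.
    interactor = CacheInteractor(events, cache)
    chat_llm = create_openai_chat_llm(config, cache_interactor=interactor, events=events)
    embeddings_llm = create_openai_embeddings_llm(config, cache_interactor=interactor, events=events)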

View file

@@ -31,6 +31,7 @@ def create_openai_chat_llm(
     *,
     client: OpenAIClient | None = None,
     cache: Cache | None = None,
+    cache_interactor: CacheInteractor | None = None,
     events: LLMEvents | None = None,
 ) -> OpenAIChatLLM:
     """Create an OpenAI chat LLM."""
@@ -43,6 +44,7 @@ def create_openai_chat_llm(
         client=client,
         config=config,
         cache=cache,
+        cache_interactor=cache_interactor,
         events=events,
         limiter=limiter,
     )
@@ -64,6 +66,7 @@ def _create_openai_text_chat_llm(
     config: OpenAIConfig,
     limiter: Limiter,
     cache: Cache | None,
+    cache_interactor: CacheInteractor | None,
     events: LLMEvents | None,
 ) -> OpenAITextChatLLM:
     operation = "chat"
@@ -71,7 +74,7 @@ def _create_openai_text_chat_llm(
         client,
         model=config.model,
         model_parameters=config.chat_parameters,
-        cache=CacheInteractor(events, cache),
+        cache=cache_interactor or CacheInteractor(events, cache),
         events=events,
         json_handler=create_json_handler(config.json_strategy, config.max_json_retries),
         usage_extractor=OpenAIUsageExtractor(),

View file

@@ -20,6 +20,7 @@ def create_openai_embeddings_llm(
     *,
     client: OpenAIClient | None = None,
     cache: Cache | None = None,
+    cache_interactor: CacheInteractor | None = None,
     events: LLMEvents | None = None,
 ) -> OpenAIEmbeddingsLLM:
     """Create an OpenAI embeddings LLM."""
@@ -33,7 +34,7 @@ def create_openai_embeddings_llm(
         client,
         model=config.model,
         model_parameters=config.embeddings_parameters,
-        cache=CacheInteractor(events, cache),
+        cache=cache_interactor or CacheInteractor(events, cache),
         events=events,
         usage_extractor=OpenAIUsageExtractor(),
         variable_injector=VariableInjector(),

View file

@@ -1,6 +1,6 @@
 [project]
 name = "fnllm"
-version = "0.0.9"
+version = "0.0.10"
 description = "A function-based LLM protocol and wrapper."
 authors = [
     {name="Chris Trevino", email="chtrevin@microsoft.com"},

View file

@@ -528,7 +528,7 @@ wheels = [
 [[package]]
 name = "fnllm"
-version = "0.0.9"
+version = "0.0.10"
 source = { editable = "python/fnllm" }
 dependencies = [
     { name = "aiolimiter" },