diff --git a/flowsettings.py b/flowsettings.py
index 0e149c3..281fd63 100644
--- a/flowsettings.py
+++ b/flowsettings.py
@@ -176,22 +176,22 @@ if config("LOCAL_MODEL", default=""):
     }
 
 # additional LLM configurations
-# KH_LLMS["claude"] = {
-#     "spec": {
-#         "__type__": "kotaemon.llms.chats.LCAnthropicChat",
-#         "model_name": "claude-3-5-sonnet-20240620",
-#         "api_key": "your-key",
-#     },
-#     "default": False,
-# }
-# KH_LLMS["gemini"] = {
-#     "spec": {
-#         "__type__": "kotaemon.llms.chats.LCGeminiChat",
-#         "model_name": "gemini-1.5-pro",
-#         "api_key": "your-key",
-#     },
-#     "default": False,
-# }
+KH_LLMS["claude"] = {
+    "spec": {
+        "__type__": "kotaemon.llms.chats.LCAnthropicChat",
+        "model_name": "claude-3-5-sonnet-20240620",
+        "api_key": "your-key",
+    },
+    "default": False,
+}
+KH_LLMS["gemini"] = {
+    "spec": {
+        "__type__": "kotaemon.llms.chats.LCGeminiChat",
+        "model_name": "gemini-1.5-pro",
+        "api_key": "your-key",
+    },
+    "default": False,
+}
 KH_LLMS["groq"] = {
     "spec": {
         "__type__": "kotaemon.llms.ChatOpenAI",
diff --git a/libs/kotaemon/pyproject.toml b/libs/kotaemon/pyproject.toml
index e3299b7..27170c0 100644
--- a/libs/kotaemon/pyproject.toml
+++ b/libs/kotaemon/pyproject.toml
@@ -30,8 +30,11 @@ dependencies = [
     "langchain>=0.1.16,<0.2.0",
     "langchain-community>=0.0.34,<0.1.0",
     "langchain-openai>=0.1.4,<0.2.0",
+    "langchain-anthropic",
+    "langchain-google-genai",
     "llama-hub>=0.0.79,<0.1.0",
     "llama-index>=0.10.40,<0.11.0",
+    "fastapi<=0.112.1",
     "llama-index-vector-stores-chroma>=0.1.9",
     "llama-index-vector-stores-lancedb",
     "llama-index-vector-stores-milvus",
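
For context on the uncommented blocks in `flowsettings.py`: each `KH_LLMS` entry is a declarative spec where `__type__` names the class to instantiate (dotted import path) and the remaining keys become constructor arguments. The sketch below illustrates that resolution pattern only; kotaemon ships its own spec loader, and `build_from_spec` here is a hypothetical stand-in, with the placeholder API key taken verbatim from the diff.

```python
# Minimal sketch of resolving a KH_LLMS spec dict into a chat-model instance.
# This is NOT kotaemon's actual loader -- it only shows the
# "__type__ is a dotted import path, remaining keys are kwargs" convention.
from importlib import import_module


def build_from_spec(spec: dict):
    """Instantiate the class named by '__type__' with the remaining keys."""
    spec = dict(spec)  # copy so the original config dict is not mutated
    module_path, _, class_name = spec.pop("__type__").rpartition(".")
    cls = getattr(import_module(module_path), class_name)
    return cls(**spec)  # e.g. LCAnthropicChat(model_name=..., api_key=...)


# Usage with the "claude" entry added by this diff (requires kotaemon and
# the newly added langchain-anthropic dependency to be installed):
claude = build_from_spec({
    "__type__": "kotaemon.llms.chats.LCAnthropicChat",
    "model_name": "claude-3-5-sonnet-20240620",
    "api_key": "your-key",  # placeholder from the diff; supply a real key
})
```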