enable config through .env
@@ -137,14 +137,14 @@ class AzureOpenAIEmbeddings(LCEmbeddingMixin, BaseEmbeddings):
         azure_endpoint: Optional[str] = None,
         deployment: Optional[str] = None,
         openai_api_key: Optional[str] = None,
-        openai_api_version: Optional[str] = None,
+        api_version: Optional[str] = None,
         request_timeout: Optional[float] = None,
         **params,
     ):
         super().__init__(
             azure_endpoint=azure_endpoint,
             deployment=deployment,
-            openai_api_version=openai_api_version,
+            api_version=api_version,
             openai_api_key=openai_api_key,
             request_timeout=request_timeout,
             **params,
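The hunk above is a keyword rename: openai_api_version becomes api_version, both in the constructor signature and in the arguments forwarded to the underlying LangChain class. A minimal sketch of configuring the embeddings from a .env file, in the spirit of the commit title; the environment variable names, the python-dotenv usage, and the kotaemon.embeddings import path are illustrative assumptions, not part of this commit:

    # Sketch only: wiring AzureOpenAIEmbeddings to values loaded from .env.
    # Env var names and the import path are assumptions for illustration.
    import os

    from dotenv import load_dotenv

    from kotaemon.embeddings import AzureOpenAIEmbeddings

    load_dotenv()  # copies KEY=value pairs from .env into os.environ

    embeddings = AzureOpenAIEmbeddings(
        azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
        deployment=os.environ["AZURE_OPENAI_EMBEDDING_DEPLOYMENT"],
        openai_api_key=os.environ["AZURE_OPENAI_API_KEY"],
        api_version=os.environ.get("OPENAI_API_VERSION", "2023-05-15"),  # renamed kwarg
    )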
@@ -2,7 +2,7 @@ from kotaemon.base.schema import AIMessage, BaseMessage, HumanMessage, SystemMes
 
 from .base import BaseLLM
 from .branching import GatedBranchingPipeline, SimpleBranchingPipeline
-from .chats import AzureChatOpenAI, ChatLLM, EndpointChatLLM, LlamaCppChat
+from .chats import AzureChatOpenAI, ChatLLM, ChatOpenAI, EndpointChatLLM, LlamaCppChat
 from .completions import LLM, AzureOpenAI, LlamaCpp, OpenAI
 from .cot import ManualSequentialChainOfThought, Thought
 from .linear import GatedLinearPipeline, SimpleLinearPipeline
@@ -17,6 +17,7 @@ __all__ = [
     "HumanMessage",
     "AIMessage",
     "SystemMessage",
+    "ChatOpenAI",
     "AzureChatOpenAI",
     "LlamaCppChat",
     # completion-specific components
@@ -1,11 +1,12 @@
 from .base import ChatLLM
 from .endpoint_based import EndpointChatLLM
-from .langchain_based import AzureChatOpenAI, LCChatMixin
+from .langchain_based import AzureChatOpenAI, ChatOpenAI, LCChatMixin
 from .llamacpp import LlamaCppChat
 
 __all__ = [
     "ChatLLM",
     "EndpointChatLLM",
+    "ChatOpenAI",
     "AzureChatOpenAI",
     "LCChatMixin",
     "LlamaCppChat",
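Taken together, the __init__.py hunks re-export the new ChatOpenAI wrapper from both kotaemon.llms.chats and the kotaemon.llms package root, next to the existing AzureChatOpenAI. A quick sanity check (not part of the commit) that both paths resolve to the same class:

    # Both import paths should point at the same class after this commit.
    from kotaemon.llms import ChatOpenAI
    from kotaemon.llms.chats import ChatOpenAI as ChatOpenAIFromChats

    assert ChatOpenAI is ChatOpenAIFromChats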
@@ -165,7 +165,36 @@ class LCChatMixin:
         raise ValueError(f"Invalid param {path}")
 
 
+class ChatOpenAI(LCChatMixin, ChatLLM):  # type: ignore
+    def __init__(
+        self,
+        openai_api_base: str | None = None,
+        openai_api_key: str | None = None,
+        model: str | None = None,
+        temperature: float = 0.7,
+        request_timeout: float | None = None,
+        **params,
+    ):
+        super().__init__(
+            openai_api_base=openai_api_base,
+            openai_api_key=openai_api_key,
+            model=model,
+            temperature=temperature,
+            request_timeout=request_timeout,
+            **params,
+        )
+
+    def _get_lc_class(self):
+        try:
+            from langchain_openai import ChatOpenAI
+        except ImportError:
+            from langchain.chat_models import ChatOpenAI
+
+        return ChatOpenAI
+
+
 class AzureChatOpenAI(LCChatMixin, ChatLLM):  # type: ignore
 
     def __init__(
         self,
         azure_endpoint: str | None = None,
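The new ChatOpenAI mirrors the existing AzureChatOpenAI: __init__ only forwards its keyword arguments to LCChatMixin, and _get_lc_class resolves the LangChain implementation lazily, preferring the newer langchain-openai package and falling back to the legacy langchain.chat_models location, so the wrapper works on both sides of the LangChain 0.1 package split. A hedged usage sketch, again with illustrative .env variable names that this commit does not mandate:

    # Sketch only: constructing the new wrapper from .env values.
    import os

    from dotenv import load_dotenv

    from kotaemon.llms import ChatOpenAI

    load_dotenv()  # e.g. a .env file containing OPENAI_API_KEY=...

    llm = ChatOpenAI(
        openai_api_base=os.environ.get("OPENAI_API_BASE"),  # optional, e.g. a proxy URL
        openai_api_key=os.environ["OPENAI_API_KEY"],
        model=os.environ.get("OPENAI_CHAT_MODEL", "gpt-3.5-turbo"),  # assumed default
        temperature=0.7,
    )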