Since the only usage of prompts is within LLMs, it is reasonable to keep them within the LLM module. This way, it is easier to discover the module, and the code base becomes less complicated. Changes: * Move prompt components into llms * Bump version 0.3.1 * Make pip install dependencies in eager mode --------- Co-authored-by: ian <ian@cinnamon.is>
24 lines
589 B
Python
24 lines
589 B
Python
"""Public API of the LLM package.

Re-exports the chat models, completion models, message types, and prompt
components so callers can import everything from this single module.
"""

# Third-party message types re-exported for convenience alongside the
# package's own BaseMessage/HumanMessage.
from langchain.schema.messages import AIMessage, SystemMessage

# Local package components.
from .chats import AzureChatOpenAI, ChatLLM
from .chats.base import BaseMessage, HumanMessage
from .completions import LLM, AzureOpenAI, OpenAI
from .prompts import BasePromptComponent, PromptTemplate

__all__ = [
    # chat-specific components
    "ChatLLM",
    "BaseMessage",
    "HumanMessage",
    "AIMessage",
    "SystemMessage",
    "AzureChatOpenAI",
    # completion-specific components
    "LLM",
    "OpenAI",
    "AzureOpenAI",
    # prompt-specific components
    "BasePromptComponent",
    "PromptTemplate",
]