diff --git a/.github/workflows/unit-test.yaml b/.github/workflows/unit-test.yaml
index 5b5a503..8882828 100644
--- a/.github/workflows/unit-test.yaml
+++ b/.github/workflows/unit-test.yaml
@@ -65,6 +65,7 @@ jobs:
         with:
           path: ${{ env.pythonLocation }}
           key: ${{ steps.get-cache-key.outputs.key }}
+          # could use the cache of a previous version to reuse unchanged packages
           restore-keys: ${{ runner.os }}-py${{ matrix.python-version }}
 
       - name: Check cache hit
@@ -81,7 +82,7 @@
           steps.check-cache-hit.outputs.check != 'true'
         run: |
           python -m pip install --upgrade pip
-          pip install --ignore-installed -e .[dev]
+          pip install -U --upgrade-strategy eager -e .[dev]
 
       - name: New dependencies cache for key ${{ steps.restore-dependencies.outputs.cache-primary-key }}
         if: |
diff --git a/knowledgehub/__init__.py b/knowledgehub/__init__.py
index 404c9ab..c88c13c 100644
--- a/knowledgehub/__init__.py
+++ b/knowledgehub/__init__.py
@@ -22,4 +22,4 @@ try:
 except ImportError:
     pass
 
-__version__ = "0.3.0"
+__version__ = "0.3.1"
diff --git a/knowledgehub/composite/branching.py b/knowledgehub/composite/branching.py
index 2c84a69..c082d5d 100644
--- a/knowledgehub/composite/branching.py
+++ b/knowledgehub/composite/branching.py
@@ -17,7 +17,7 @@ class SimpleBranchingPipeline(BaseComponent):
         from kotaemon.composite import GatedLinearPipeline
         from kotaemon.llms.chats.openai import AzureChatOpenAI
         from kotaemon.post_processing.extractor import RegexExtractor
-        from kotaemon.prompt.base import BasePromptComponent
+        from kotaemon.llms import BasePromptComponent
 
         def identity(x):
             return x
@@ -90,7 +90,7 @@ class GatedBranchingPipeline(SimpleBranchingPipeline):
         from kotaemon.composite import GatedLinearPipeline
         from kotaemon.llms.chats.openai import AzureChatOpenAI
         from kotaemon.post_processing.extractor import RegexExtractor
-        from kotaemon.prompt.base import BasePromptComponent
+        from kotaemon.llms import BasePromptComponent
 
         def identity(x):
             return x
@@ -150,9 +150,9 @@ class GatedBranchingPipeline(SimpleBranchingPipeline):
 if __name__ == "__main__":
     import dotenv
 
+    from kotaemon.llms import BasePromptComponent
     from kotaemon.llms.chats.openai import AzureChatOpenAI
     from kotaemon.post_processing.extractor import RegexExtractor
-    from kotaemon.prompt.base import BasePromptComponent
 
     def identity(x):
         return x
diff --git a/knowledgehub/composite/linear.py b/knowledgehub/composite/linear.py
index 2f1cefd..9f1d0d3 100644
--- a/knowledgehub/composite/linear.py
+++ b/knowledgehub/composite/linear.py
@@ -2,9 +2,7 @@ from typing import Any, Callable, Optional, Union
 
 from kotaemon.base import BaseComponent
 from kotaemon.base.schema import Document, IO_Type
-from kotaemon.llms.chats.base import ChatLLM
-from kotaemon.llms.completions.base import LLM
-from kotaemon.prompt.base import BasePromptComponent
+from kotaemon.llms import LLM, BasePromptComponent, ChatLLM
 
 
 class SimpleLinearPipeline(BaseComponent):
@@ -22,7 +20,7 @@
 
     Example Usage:
         from kotaemon.llms.chats.openai import AzureChatOpenAI
-        from kotaemon.prompt.base import BasePromptComponent
+        from kotaemon.llms import BasePromptComponent
 
         def identity(x):
             return x
@@ -89,7 +87,7 @@ class GatedLinearPipeline(SimpleLinearPipeline):
     Example Usage:
         from kotaemon.llms.chats.openai import AzureChatOpenAI
         from kotaemon.post_processing.extractor import RegexExtractor
-        from kotaemon.prompt.base import BasePromptComponent
+        from kotaemon.llms import BasePromptComponent
 
         def identity(x):
             return x
diff --git a/knowledgehub/llms/__init__.py b/knowledgehub/llms/__init__.py
index 5d725a0..7f69cd1 100644
--- a/knowledgehub/llms/__init__.py
+++ b/knowledgehub/llms/__init__.py
@@ -2,12 +2,22 @@ from langchain.schema.messages import AIMessage, SystemMessage
 
 from .chats import AzureChatOpenAI, ChatLLM
 from .chats.base import BaseMessage, HumanMessage
+from .completions import LLM, AzureOpenAI, OpenAI
+from .prompts import BasePromptComponent, PromptTemplate
 
 __all__ = [
+    # chat-specific components
     "ChatLLM",
-    "AzureChatOpenAI",
     "BaseMessage",
     "HumanMessage",
     "AIMessage",
     "SystemMessage",
+    "AzureChatOpenAI",
+    # completion-specific components
+    "LLM",
+    "OpenAI",
+    "AzureOpenAI",
+    # prompt-specific components
+    "BasePromptComponent",
+    "PromptTemplate",
 ]
diff --git a/knowledgehub/llms/completions/__init__.py b/knowledgehub/llms/completions/__init__.py
index e69de29..b980944 100644
--- a/knowledgehub/llms/completions/__init__.py
+++ b/knowledgehub/llms/completions/__init__.py
@@ -0,0 +1,4 @@
+from .base import LLM
+from .openai import AzureOpenAI, OpenAI
+
+__all__ = ["LLM", "OpenAI", "AzureOpenAI"]
diff --git a/knowledgehub/llms/completions/base.py b/knowledgehub/llms/completions/base.py
index 9f8cd4b..15adf89 100644
--- a/knowledgehub/llms/completions/base.py
+++ b/knowledgehub/llms/completions/base.py
@@ -64,7 +64,3 @@ class LangchainLLM(LLM):
             setattr(self.agent, name, value)
         else:
             super().__setattr__(name, value)
-
-
-class LLMChat(BaseComponent):
-    pass
diff --git a/knowledgehub/llms/prompts/__init__.py b/knowledgehub/llms/prompts/__init__.py
new file mode 100644
index 0000000..6a38456
--- /dev/null
+++ b/knowledgehub/llms/prompts/__init__.py
@@ -0,0 +1,4 @@
+from .base import BasePromptComponent
+from .template import PromptTemplate
+
+__all__ = ["BasePromptComponent", "PromptTemplate"]
diff --git a/knowledgehub/prompt/base.py b/knowledgehub/llms/prompts/base.py
similarity index 98%
rename from knowledgehub/prompt/base.py
rename to knowledgehub/llms/prompts/base.py
index 494f2ce..d0d8d8c 100644
--- a/knowledgehub/prompt/base.py
+++ b/knowledgehub/llms/prompts/base.py
@@ -1,7 +1,7 @@
 from typing import Callable, Union
 
-from kotaemon.base import BaseComponent, Document
-from kotaemon.prompt.template import PromptTemplate
+from ...base import BaseComponent, Document
+from .template import PromptTemplate
 
 
 class BasePromptComponent(BaseComponent):
diff --git a/knowledgehub/prompt/template.py b/knowledgehub/llms/prompts/template.py
similarity index 100%
rename from knowledgehub/prompt/template.py
rename to knowledgehub/llms/prompts/template.py
diff --git a/knowledgehub/pipelines/agents/base.py b/knowledgehub/pipelines/agents/base.py
index a3bc603..cf8f2fe 100644
--- a/knowledgehub/pipelines/agents/base.py
+++ b/knowledgehub/pipelines/agents/base.py
@@ -3,10 +3,10 @@ from typing import Dict, List, Optional, Union
 
 from pydantic import BaseModel
 
+from kotaemon.llms import PromptTemplate
 from kotaemon.llms.chats.base import ChatLLM
 from kotaemon.llms.completions.base import LLM
 from kotaemon.pipelines.tools import BaseTool
-from kotaemon.prompt.template import PromptTemplate
 
 BaseLLM = Union[ChatLLM, LLM]
 
diff --git a/knowledgehub/pipelines/agents/react/agent.py b/knowledgehub/pipelines/agents/react/agent.py
index 2d8ac53..d900f35 100644
--- a/knowledgehub/pipelines/agents/react/agent.py
+++ b/knowledgehub/pipelines/agents/react/agent.py
@@ -4,7 +4,7 @@ from typing import Dict, List, Optional, Tuple, Type, Union
 
 from pydantic import BaseModel, create_model
 
-from kotaemon.prompt.template import PromptTemplate
+from kotaemon.llms import PromptTemplate
 
 from ..base import AgentOutput, AgentType, BaseAgent, BaseLLM, BaseTool
 from ..output.base import AgentAction, AgentFinish
diff --git a/knowledgehub/pipelines/agents/react/prompt.py b/knowledgehub/pipelines/agents/react/prompt.py
index ba80b16..7b583db 100644
--- a/knowledgehub/pipelines/agents/react/prompt.py
+++ b/knowledgehub/pipelines/agents/react/prompt.py
@@ -1,6 +1,6 @@
 # flake8: noqa
 
-from kotaemon.prompt.template import PromptTemplate
+from kotaemon.llms import PromptTemplate
 
 zero_shot_react_prompt = PromptTemplate(
     template="""Answer the following questions as best you can. You have access to the following tools:
diff --git a/knowledgehub/pipelines/agents/rewoo/agent.py b/knowledgehub/pipelines/agents/rewoo/agent.py
index 1247c91..78d1119 100644
--- a/knowledgehub/pipelines/agents/rewoo/agent.py
+++ b/knowledgehub/pipelines/agents/rewoo/agent.py
@@ -5,9 +5,7 @@ from typing import Any, Dict, List, Optional, Tuple, Type, Union
 
 from pydantic import BaseModel, create_model
 
-from kotaemon.llms.chats.base import ChatLLM
-from kotaemon.llms.completions.base import LLM
-from kotaemon.prompt.template import PromptTemplate
+from kotaemon.llms import LLM, ChatLLM, PromptTemplate
 
 from ..base import AgentOutput, AgentType, BaseAgent, BaseLLM, BaseTool
 from ..output.base import BaseScratchPad
diff --git a/knowledgehub/pipelines/agents/rewoo/planner.py b/knowledgehub/pipelines/agents/rewoo/planner.py
index 89c1218..7deeb27 100644
--- a/knowledgehub/pipelines/agents/rewoo/planner.py
+++ b/knowledgehub/pipelines/agents/rewoo/planner.py
@@ -1,8 +1,7 @@
 from typing import Any, List, Optional, Union
 
-from kotaemon.base import BaseComponent
-from kotaemon.prompt.template import PromptTemplate
-
+from ....base import BaseComponent
+from ....llms import PromptTemplate
 from ..base import BaseLLM, BaseTool
 from ..output.base import BaseScratchPad
 from .prompt import zero_shot_planner_prompt
diff --git a/knowledgehub/pipelines/agents/rewoo/prompt.py b/knowledgehub/pipelines/agents/rewoo/prompt.py
index 569e89f..24ecb30 100644
--- a/knowledgehub/pipelines/agents/rewoo/prompt.py
+++ b/knowledgehub/pipelines/agents/rewoo/prompt.py
@@ -1,6 +1,6 @@
 # flake8: noqa
 
-from kotaemon.prompt.template import PromptTemplate
+from kotaemon.llms import PromptTemplate
 
 zero_shot_planner_prompt = PromptTemplate(
     template="""You are an AI agent who makes step-by-step plans to solve a problem under the help of external tools.
diff --git a/knowledgehub/pipelines/agents/rewoo/solver.py b/knowledgehub/pipelines/agents/rewoo/solver.py
index 2e6b5c9..d2ce271 100644
--- a/knowledgehub/pipelines/agents/rewoo/solver.py
+++ b/knowledgehub/pipelines/agents/rewoo/solver.py
@@ -1,7 +1,7 @@
 from typing import Any, List, Optional, Union
 
 from kotaemon.base import BaseComponent
-from kotaemon.prompt.template import PromptTemplate
+from kotaemon.llms import PromptTemplate
 
 from ..base import BaseLLM
 from ..output.base import BaseScratchPad
diff --git a/knowledgehub/pipelines/cot.py b/knowledgehub/pipelines/cot.py
index e1ff0c1..e6100b7 100644
--- a/knowledgehub/pipelines/cot.py
+++ b/knowledgehub/pipelines/cot.py
@@ -4,8 +4,8 @@ from typing import List
 from theflow import Compose, Node, Param
 
 from kotaemon.base import BaseComponent
+from kotaemon.llms import BasePromptComponent
 from kotaemon.llms.chats.openai import AzureChatOpenAI
-from kotaemon.prompt.base import BasePromptComponent
 
 
 class Thought(BaseComponent):
diff --git a/knowledgehub/pipelines/qa.py b/knowledgehub/pipelines/qa.py
index d43bfa2..3a22ef2 100644
--- a/knowledgehub/pipelines/qa.py
+++ b/knowledgehub/pipelines/qa.py
@@ -9,11 +9,11 @@ from kotaemon.base import BaseComponent
 from kotaemon.base.schema import RetrievedDocument
 from kotaemon.docstores import InMemoryDocumentStore
 from kotaemon.embeddings import AzureOpenAIEmbeddings
+from kotaemon.llms import PromptTemplate
 from kotaemon.llms.chats.openai import AzureChatOpenAI
 from kotaemon.pipelines.agents import BaseAgent
 from kotaemon.pipelines.retrieving import RetrieveDocumentFromVectorStorePipeline
 from kotaemon.pipelines.tools import ComponentTool
-from kotaemon.prompt.template import PromptTemplate
 from kotaemon.vectorstores import InMemoryVectorStore
 
 from .utils import file_names_to_collection_name
diff --git a/knowledgehub/pipelines/tools/llm.py b/knowledgehub/pipelines/tools/llm.py
index a4da1a7..948b8ff 100644
--- a/knowledgehub/pipelines/tools/llm.py
+++ b/knowledgehub/pipelines/tools/llm.py
@@ -2,9 +2,7 @@ from typing import AnyStr, Optional, Type, Union
 
 from pydantic import BaseModel, Field
 
-from kotaemon.llms.chats.base import ChatLLM
-from kotaemon.llms.chats.openai import AzureChatOpenAI
-from kotaemon.llms.completions.base import LLM
+from kotaemon.llms import LLM, AzureChatOpenAI, ChatLLM
 
 from .base import BaseTool, ToolException
 
diff --git a/knowledgehub/prompt/__init__.py b/knowledgehub/prompt/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/test_composite.py b/tests/test_composite.py
index dadffb5..f89310c 100644
--- a/tests/test_composite.py
+++ b/tests/test_composite.py
@@ -9,9 +9,9 @@ from kotaemon.composite import (
     SimpleBranchingPipeline,
     SimpleLinearPipeline,
 )
+from kotaemon.llms import BasePromptComponent
 from kotaemon.llms.chats.openai import AzureChatOpenAI
 from kotaemon.post_processing.extractor import RegexExtractor
-from kotaemon.prompt.base import BasePromptComponent
 
 _openai_chat_completion_response = ChatCompletion.parse_obj(
     {
diff --git a/tests/test_prompt.py b/tests/test_prompt.py
index 915c67b..1f47df7 100644
--- a/tests/test_prompt.py
+++ b/tests/test_prompt.py
@@ -1,9 +1,8 @@
 import pytest
 
 from kotaemon.base import Document
+from kotaemon.llms import BasePromptComponent, PromptTemplate
 from kotaemon.post_processing.extractor import RegexExtractor
-from kotaemon.prompt.base import BasePromptComponent
-from kotaemon.prompt.template import PromptTemplate
 
 
 def test_set_attributes():
diff --git a/tests/test_template.py b/tests/test_template.py
index c0e586c..cf7ad46 100644
--- a/tests/test_template.py
+++ b/tests/test_template.py
@@ -1,6 +1,6 @@
 import pytest
 
-from kotaemon.prompt.template import PromptTemplate
+from kotaemon.llms import PromptTemplate
 
 
 def test_prompt_template_creation():
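
For reference, the net effect of this refactor on downstream imports — a minimal sketch, assuming the package is exposed under the kotaemon namespace as in the hunks above; every name comes from the new __all__ in knowledgehub/llms/__init__.py, and this snippet is illustrative rather than part of the patch:

    # Old paths removed by this change:
    #   from kotaemon.prompt.base import BasePromptComponent
    #   from kotaemon.prompt.template import PromptTemplate
    #   from kotaemon.llms.completions.base import LLM
    #   from kotaemon.llms.chats.base import ChatLLM

    # New flat import surface re-exported from kotaemon.llms:
    from kotaemon.llms import (
        LLM,
        AzureChatOpenAI,
        AzureOpenAI,
        BasePromptComponent,
        ChatLLM,
        OpenAI,
        PromptTemplate,
    )

Deep paths such as kotaemon.llms.chats.openai still resolve, but the single kotaemon.llms entry point is what the updated pipelines, agents, and tests above now rely on.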