Move prompts into LLMs module (#70)
Since prompts are only used by LLMs, it makes sense to keep them inside the LLM module. This makes the module easier to discover and keeps the code base simpler.

Changes:
* Move prompt components into llms
* Bump version to 0.3.1
* Make pip install dependencies in eager mode

---------

Co-authored-by: ian <ian@cinnamon.is>
committed by GitHub
parent 8532138842
commit 693ed39de4
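For downstream code, the practical effect of this commit is a new import path for the prompt component. The sketch below shows the migration; the template string and the call at the end are illustrative assumptions, not taken from this commit.

# Before this commit:
# from kotaemon.prompt.base import BasePromptComponent

# After this commit, prompt components are exposed from the llms module:
from kotaemon.llms import BasePromptComponent

# Illustrative assumption: BasePromptComponent wraps a format-string template
# and fills it with keyword arguments when the component is run.
prompt = BasePromptComponent(template="Answer the question: {question}")
result = prompt(question="What does this commit change?")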
@@ -17,7 +17,7 @@ class SimpleBranchingPipeline(BaseComponent):
 from kotaemon.composite import GatedLinearPipeline
 from kotaemon.llms.chats.openai import AzureChatOpenAI
 from kotaemon.post_processing.extractor import RegexExtractor
-from kotaemon.prompt.base import BasePromptComponent
+from kotaemon.llms import BasePromptComponent

 def identity(x):
     return x
@@ -90,7 +90,7 @@ class GatedBranchingPipeline(SimpleBranchingPipeline):
 from kotaemon.composite import GatedLinearPipeline
 from kotaemon.llms.chats.openai import AzureChatOpenAI
 from kotaemon.post_processing.extractor import RegexExtractor
-from kotaemon.prompt.base import BasePromptComponent
+from kotaemon.llms import BasePromptComponent

 def identity(x):
     return x
@@ -150,9 +150,9 @@ class GatedBranchingPipeline(SimpleBranchingPipeline):
 if __name__ == "__main__":
     import dotenv

+    from kotaemon.llms import BasePromptComponent
     from kotaemon.llms.chats.openai import AzureChatOpenAI
     from kotaemon.post_processing.extractor import RegexExtractor
-    from kotaemon.prompt.base import BasePromptComponent

     def identity(x):
         return x
@@ -2,9 +2,7 @@ from typing import Any, Callable, Optional, Union

 from kotaemon.base import BaseComponent
 from kotaemon.base.schema import Document, IO_Type
-from kotaemon.llms.chats.base import ChatLLM
-from kotaemon.llms.completions.base import LLM
-from kotaemon.prompt.base import BasePromptComponent
+from kotaemon.llms import LLM, BasePromptComponent, ChatLLM


 class SimpleLinearPipeline(BaseComponent):
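After this consolidation, the LLM base classes and the prompt component share a single import path. A minimal sketch of what that allows, using only the class names confirmed in this diff (the helper function itself is illustrative):

from typing import Union

from kotaemon.llms import LLM, BasePromptComponent, ChatLLM


# Illustrative helper: accepts either a chat model or a completion model
# together with a prompt component, relying only on the consolidated import.
def describe(llm: Union[ChatLLM, LLM], prompt: BasePromptComponent) -> str:
    return f"{type(prompt).__name__} -> {type(llm).__name__}"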
@@ -22,7 +20,7 @@ class SimpleLinearPipeline(BaseComponent):

 Example Usage:
 from kotaemon.llms.chats.openai import AzureChatOpenAI
-from kotaemon.prompt.base import BasePromptComponent
+from kotaemon.llms import BasePromptComponent

 def identity(x):
     return x
@@ -89,7 +87,7 @@ class GatedLinearPipeline(SimpleLinearPipeline):
 Example Usage:
 from kotaemon.llms.chats.openai import AzureChatOpenAI
 from kotaemon.post_processing.extractor import RegexExtractor
-from kotaemon.prompt.base import BasePromptComponent
+from kotaemon.llms import BasePromptComponent

 def identity(x):
     return x