Move prompts into LLMs module (#70)
Since the only usage of prompt is within LLMs, it is reasonable to keep it within the LLM module. This way, the module is easier to discover, and the code base becomes simpler. Changes: * Move prompt components into llms * Bump version 0.3.1 * Make pip install dependencies in eager mode --------- Co-authored-by: ian <ian@cinnamon.is>
This commit is contained in:
committed by
GitHub
parent
8532138842
commit
693ed39de4
@@ -9,9 +9,9 @@ from kotaemon.composite import (
|
||||
SimpleBranchingPipeline,
|
||||
SimpleLinearPipeline,
|
||||
)
|
||||
from kotaemon.llms import BasePromptComponent
|
||||
from kotaemon.llms.chats.openai import AzureChatOpenAI
|
||||
from kotaemon.post_processing.extractor import RegexExtractor
|
||||
from kotaemon.prompt.base import BasePromptComponent
|
||||
|
||||
_openai_chat_completion_response = ChatCompletion.parse_obj(
|
||||
{
|
||||
|
@@ -1,9 +1,8 @@
|
||||
import pytest
|
||||
|
||||
from kotaemon.base import Document
|
||||
from kotaemon.llms import BasePromptComponent, PromptTemplate
|
||||
from kotaemon.post_processing.extractor import RegexExtractor
|
||||
from kotaemon.prompt.base import BasePromptComponent
|
||||
from kotaemon.prompt.template import PromptTemplate
|
||||
|
||||
|
||||
def test_set_attributes():
|
||||
|
@@ -1,6 +1,6 @@
|
||||
import pytest
|
||||
|
||||
from kotaemon.prompt.template import PromptTemplate
|
||||
from kotaemon.llms import PromptTemplate
|
||||
|
||||
|
||||
def test_prompt_template_creation():
|
||||
|
Reference in New Issue
Block a user