[AUR-411] Adopt to Example2 project (#28)
Add the chatbot from Example2. Create the UI for chat.
parent 533fffa6db
commit 6e7905cbc0
@@ -22,4 +22,4 @@ try:
 except ImportError:
     pass
 
-__version__ = "0.0.3"
+__version__ = "0.0.4"
knowledgehub/chatbot/__init__.py (new file, +4)
@@ -0,0 +1,4 @@
+from .base import BaseChatBot, ChatConversation
+from .simple_respondent import SimpleRespondentChatbot
+
+__all__ = ["BaseChatBot", "SimpleRespondentChatbot", "ChatConversation"]
knowledgehub/chatbot/base.py (new file, +117)
@@ -0,0 +1,117 @@
+from abc import abstractmethod
+from typing import List, Optional
+
+from langchain.schema.messages import AIMessage, SystemMessage
+from theflow import Param, SessionCompose
+
+from ..base import BaseComponent
+from ..llms.base import LLMInterface
+from ..llms.chats.base import BaseMessage, HumanMessage
+
+
+class BaseChatBot(BaseComponent):
+    @abstractmethod
+    def run(self, messages: List[BaseMessage]) -> LLMInterface:
+        ...
+
+
+def session_chat_storage(obj):
+    """Store using the bot location rather than the session location"""
+    return obj._store_result
+
+
+class ChatConversation(SessionCompose):
+    """Base implementation of a chat bot component
+
+    A chatbot component should:
+        - handle internal state, including history messages
+        - return output for a given input
+    """
+
+    class Config:
+        store_result = session_chat_storage
+
+    system_message: Param[str] = Param(default="")
+    bot: BaseChatBot
+
+    def __init__(self, *args, **kwargs):
+        self._history: List[BaseMessage] = []
+        self._store_result = (
+            f"{self.__module__}.{self.__class__.__name__},uninitiated_bot"
+        )
+        super().__init__(*args, **kwargs)
+
+    def run(self, message: HumanMessage) -> Optional[BaseMessage]:
+        """Chat, given a message, return a response
+
+        Args:
+            message: The message to respond to
+
+        Returns:
+            The response to the message. If None, no response is sent.
+        """
+        user_message = (
+            HumanMessage(content=message) if isinstance(message, str) else message
+        )
+        self.history.append(user_message)
+
+        output = self.bot(self.history).text
+        output_message = None
+        if output is not None:
+            output_message = AIMessage(content=output)
+            self.history.append(output_message)
+
+        return output_message
+
+    def start_session(self):
+        self._store_result = self.bot.config.store_result
+        super().start_session()
+        if not self.history and self.system_message:
+            system_message = SystemMessage(content=self.system_message)
+            self.history.append(system_message)
+
+    def end_session(self):
+        super().end_session()
+        self._history = []
+
+    def check_end(
+        self,
+        history: Optional[List[BaseMessage]] = None,
+        user_message: Optional[HumanMessage] = None,
+        bot_message: Optional[AIMessage] = None,
+    ) -> bool:
+        """Check if a conversation should end"""
+        if user_message is not None and user_message.content == "":
+            return True
+
+        return False
+
+    def terminal_session(self):
+        """Create a terminal session"""
+        self.start_session()
+        print(">> Start chat:")
+
+        while True:
+            human = HumanMessage(content=input("Human: "))
+            if self.check_end(history=self.history, user_message=human):
+                break
+
+            output = self(human)
+            if output is None:
+                print("AI: <No response>")
+            else:
+                print("AI:", output.content)
+
+            if self.check_end(history=self.history, bot_message=output):
+                break
+
+        self.end_session()
+
+    @property
+    def history(self):
+        return self._history
+
+    @history.setter
+    def history(self, value):
+        self._history = value
+        self._variablex()
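A usage note (not part of the commit): ChatConversation only needs its bot to return an object carrying a .text attribute (the LLMInterface shape), and theflow components are invoked through __call__, which routes to run(). A minimal sketch under those assumptions, with a hypothetical echo bot:

from dataclasses import dataclass
from typing import List

from kotaemon.chatbot import BaseChatBot, ChatConversation
from kotaemon.llms.chats.base import BaseMessage


@dataclass
class FakeLLMOutput:
    # hypothetical stand-in for LLMInterface: only .text is read by ChatConversation
    text: str


class EchoChatBot(BaseChatBot):
    """Parrot the last human message back; useful for wiring tests"""

    def run(self, messages: List[BaseMessage]) -> FakeLLMOutput:
        return FakeLLMOutput(text=f"You said: {messages[-1].content}")


conv = ChatConversation(bot=EchoChatBot(), system_message="You are a parrot.")
conv.terminal_session()  # REPL loop; an empty human message ends the chat via check_end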
knowledgehub/chatbot/simple_respondent.py (new file, +13)
@@ -0,0 +1,13 @@
+from theflow import Node
+
+from ..llms import ChatLLM
+from .base import BaseChatBot
+
+
+class SimpleRespondentChatbot(BaseChatBot):
+    """Simple text respondent chatbot that essentially wraps around a chat LLM"""
+
+    llm: Node[ChatLLM]
+
+    def _get_message(self) -> str:
+        return self.llm(self.history).text[0]
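To wire this bot to a real LLM, something like the following should work; it assumes theflow accepts the llm node as a constructor kwarg and that AzureChatOpenAI is configured through its own connection kwargs (omitted here):

from kotaemon.chatbot import ChatConversation
from kotaemon.chatbot.simple_respondent import SimpleRespondentChatbot
from kotaemon.llms import AzureChatOpenAI

llm = AzureChatOpenAI()  # credential/deployment kwargs omitted; depends on your Azure setup
bot = SimpleRespondentChatbot(llm=llm)
ChatConversation(bot=bot).terminal_session()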
@@ -29,8 +29,9 @@ main.add_command(promptui)
 @promptui.command()
 @click.argument("export_path", nargs=1)
-@click.option("--output", default="promptui.yml", required=False)
+@click.option("--output", default="promptui.yml", show_default=True, required=False)
 def export(export_path, output):
     """Export a pipeline to a config file"""
     import sys
 
     from theflow.utils.modules import import_dotted_string
@@ -45,11 +46,119 @@ def export(export_path, output):
 @promptui.command()
 @click.argument("run_path", required=False, default="promptui.yml")
-def run(run_path):
+@click.option(
+    "--share",
+    is_flag=True,
+    show_default=True,
+    default=False,
+    help="Share the app through Gradio. Requires --username to enable authentication.",
+)
+@click.option(
+    "--username",
+    required=False,
+    help="Username for the user. If not provided, the promptui will not have "
+    "authentication.",
+)
+@click.option(
+    "--password",
+    required=False,
+    help="Password for the user. If not provided, will be prompted.",
+)
+@click.option(
+    "--appname",
+    required=False,
+    default="The share app subdomain. Requires --share and --username",
+)
+@click.option(
+    "--port",
+    required=False,
+    help="Port to run the app. If not provided, will $GRADIO_SERVER_PORT (7860)",
+)
+def run(run_path, share, username, password, appname, port):
+    """Run the UI from a config file
+
+    Examples:
+
+    \b
+    # Run with default config file
+    $ kh promptui run
+
+    \b
+    # Run with username and password supplied
+    $ kh promptui run --username admin --password password
+
+    \b
+    # Run with username and prompted password
+    $ kh promptui run --username admin
+
+    # Run and share to promptui
+    # kh promptui run --username admin --password password --share --appname hey \
+        --port 7861
+    """
     import sys
 
     from kotaemon.contribs.promptui.ui import build_from_dict
 
-    build_from_dict(run_path)
+    sys.path.append(os.getcwd())
+
+    check_config_format(run_path)
+    demo = build_from_dict(run_path)
+
+    params: dict = {}
+    if username is not None:
+        if password is not None:
+            auth = (username, password)
+        else:
+            auth = (username, click.prompt("Password", hide_input=True))
+        params["auth"] = auth
+
+    port = int(port) if port else int(os.getenv("GRADIO_SERVER_PORT", "7860"))
+    params["server_port"] = port
+
+    if share:
+        if username is None:
+            raise ValueError(
+                "Username must be provided to enable authentication for sharing"
+            )
+        if appname:
+            command = [
+                "frpc",
+                "http",
+                "-l",
+                str(port),
+                "-i",
+                "127.0.0.1",
+                "--uc",
+                "--sd",
+                str(appname),
+                "-n",
+                str(appname + username),
+                "--server_addr",
+                "35.92.162.75:7000",
+                "--token",
+                "Wz807/DyC;#t;#/",
+                "--disable_log_color",
+            ]
+            import atexit
+            import subprocess
+
+            proc = subprocess.Popen(
+                command, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+            )
+
+            def kill_proc():
+                if proc is not None:
+                    print(f"Killing tunnel: https://{appname}.promptui.dm.cinnamon.is")
+                    proc.terminate()
+
+            atexit.register(kill_proc)
+
+            print(f"App is shared at https://{appname}.promptui.dm.cinnamon.is")
+        else:
+            params["share"] = True
+            print("App is shared at Gradio")
+
+    demo.launch(**params)
 
 
 @main.command()
@@ -18,3 +18,26 @@ DEFAULT_COMPONENT_BY_TYPES = {
     "float": "number",
     "list": "dropdown",
 }
+
+
+def get_component(component_def: dict) -> gr.components.Component:
+    """Get the component based on component definition"""
+    component_cls = None
+
+    if "component" in component_def:
+        component = component_def["component"]
+        if component not in SUPPORTED_COMPONENTS:
+            raise ValueError(
+                f"Unsupported UI component: {component}. "
+                f"Must be one of {SUPPORTED_COMPONENTS}"
+            )
+
+        component_cls = COMPONENTS_CLASS[component]
+    else:
+        raise ValueError(
+            f"Cannot decide the component from {component_def}. "
+            "Please specify `component` with 1 of the following "
+            f"values: {SUPPORTED_COMPONENTS}"
+        )
+
+    return component_cls(**component_def.get("params", {}))
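For reference, a hypothetical call (assuming COMPONENTS_CLASS maps "text" to gr.Textbox):

from kotaemon.contribs.promptui.base import get_component

textbox = get_component(
    {"component": "text", "params": {"label": "System message", "value": ""}}
)
# equivalent to gr.Textbox(label="System message", value="")
# get_component({}) would raise ValueError: the "component" key is required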
@@ -6,6 +6,7 @@ from typing import Any, Dict, Optional, Type, Union
 import yaml
 
 from ...base import BaseComponent
+from ...chatbot import BaseChatBot
 from .base import DEFAULT_COMPONENT_BY_TYPES
@@ -47,8 +48,10 @@ def handle_param(param: dict) -> dict:
     if default is not None:
         params["value"] = default
 
-    type_: str = type(default).__name__ if default is not None else ""
-    ui_component = DEFAULT_COMPONENT_BY_TYPES.get(type_, "text")
+    ui_component = param.get("component_ui", "")
+    if not ui_component:
+        type_: str = type(default).__name__ if default is not None else ""
+        ui_component = DEFAULT_COMPONENT_BY_TYPES.get(type_, "text")
 
     return {
         "component": ui_component,
@@ -62,10 +65,14 @@ def handle_node(node: dict) -> dict:
     for name, param_def in node.get("params", {}).items():
         if isinstance(param_def["default_callback"], str):
             continue
+        if param_def.get("ignore_ui", False):
+            continue
         config[name] = handle_param(param_def)
     for name, node_def in node.get("nodes", {}).items():
         if isinstance(node_def["default_callback"], str):
             continue
+        if node_def.get("ignore_ui", False):
+            continue
         for key, value in handle_node(node_def["default"]).items():
             config[f"{name}.{key}"] = value
         for key, value in node_def["default_kwargs"].items():
@@ -113,13 +120,52 @@ def export_pipeline_to_config(
         pipeline = pipeline()
 
     pipeline_def = pipeline.describe()
-    config = {
-        f"{pipeline.__module__}.{pipeline.__class__.__name__}": {
-            "params": handle_node(pipeline_def),
-            "inputs": handle_input(pipeline),
-            "outputs": [{"step": ".", "component": "text"}],
-        }
-    }
+    ui_type = "chat" if isinstance(pipeline, BaseChatBot) else "simple"
+    if ui_type == "chat":
+        params = {f".bot.{k}": v for k, v in handle_node(pipeline_def).items()}
+        params["system_message"] = {"component": "text", "params": {"value": ""}}
+        config_obj: dict = {
+            "ui-type": ui_type,
+            "params": params,
+            "inputs": {},
+            "outputs": [],
+            "logs": {
+                "full_pipeline": {
+                    "input": {
+                        "step": ".",
+                        "getter": "_get_input",
+                    },
+                    "output": {
+                        "step": ".",
+                        "getter": "_get_output",
+                    },
+                    "preference": {
+                        "step": "preference",
+                    },
+                }
+            },
+        }
+    else:
+        config_obj = {
+            "ui-type": ui_type,
+            "params": handle_node(pipeline_def),
+            "inputs": handle_input(pipeline),
+            "outputs": [{"step": ".", "getter": "_get_output", "component": "text"}],
+            "logs": {
+                "full_pipeline": {
+                    "input": {
+                        "step": ".",
+                        "getter": "_get_input",
+                    },
+                    "output": {
+                        "step": ".",
+                        "getter": "_get_output",
+                    },
+                },
+            },
+        }
 
+    config = {f"{pipeline.__module__}.{pipeline.__class__.__name__}": config_obj}
    if path is not None:
         old_config = config
         if Path(path).is_file():
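A sketch of the new chat branch in action (the pipeline name is hypothetical, and the exact module path of export_pipeline_to_config is not shown in this diff):

# assuming MyChatbot subclasses BaseChatBot, so ui_type resolves to "chat"
config = export_pipeline_to_config(MyChatbot, path="promptui.yml")
# config["mypkg.MyChatbot"]["ui-type"] == "chat"
# its params are prefixed ".bot.*" because the promptui session wraps the bot
# in a ChatConversation, plus an editable "system_message" text param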
@@ -127,6 +173,6 @@ def export_pipeline_to_config(
             old_config = yaml.safe_load(f)
         old_config.update(config)
     with open(path, "w") as f:
-        yaml.safe_dump(old_config, f)
+        yaml.safe_dump(old_config, f, sort_keys=False)
 
     return config
@@ -11,6 +11,8 @@ from theflow.utils.modules import import_dotted_string
 
 from kotaemon.base import BaseComponent
 
+from .logs import ResultLog
+
 
 def from_log_to_dict(pipeline_cls: Type[BaseComponent], log_config: dict) -> dict:
     """Export the log to panda dataframes
@@ -26,10 +28,13 @@ def from_log_to_dict(pipeline_cls: Type[BaseComponent], log_config: dict) -> dict:
     pipeline_log_path = storage.url(pipeline_cls().config.store_result)
     dirs = list(sorted([f.path for f in os.scandir(pipeline_log_path) if f.is_dir()]))
 
+    # get resultlog callback
+    resultlog = getattr(pipeline_cls, "_promptui_resultlog", ResultLog)
+    allowed_resultlog_callbacks = {i for i in dir(resultlog) if not i.startswith("__")}
+
     ids = []
     params: Dict[str, List[Any]] = {}
-    inputs: Dict[str, List[Any]] = {}
-    outputs: Dict[str, List[Any]] = {}
+    logged_infos: Dict[str, List[Any]] = {}
 
     for idx, each_dir in enumerate(dirs):
         ids.append(str(Path(each_dir).name))
@@ -44,34 +49,29 @@ def from_log_to_dict(pipeline_cls: Type[BaseComponent], log_config: dict) -> dict:
                 params[key] = [None] * len(dirs)
             params[key][idx] = value
 
         # get the progress
         progress_file = os.path.join(each_dir, "progress.pkl")
         if os.path.exists(progress_file):
             with open(progress_file, "rb") as f:
                 progress = pickle.load(f)
 
-            # get the inputs
-            for each_input in log_config["inputs"]:
-                name = each_input["name"]
-                step = each_input["step"]
-                if name not in inputs:
-                    inputs[name] = [None] * len(dirs)
-                variable = each_input.get("variable", "")
-                if variable:
-                    inputs[name][idx] = progress[step]["input"]["kwargs"][variable]
-                else:
-                    inputs[name][idx] = progress[step]["input"]
-
-            # get the outputs
-            for each_output in log_config["outputs"]:
-                name = each_output["name"]
-                step = each_output["step"]
-                if name not in outputs:
-                    outputs[name] = [None] * len(dirs)
-                outputs[name][idx] = progress[step]["output"]
-                if each_output.get("item", ""):
-                    outputs[name][idx] = outputs[name][each_output["item"]]
+            for name, col_info in log_config.items():
+                step = col_info["step"]
+                getter = col_info.get("getter", None)
+                if name not in logged_infos:
+                    logged_infos[name] = [None] * len(dirs)
+
+                info = progress[step]
+                if getter:
+                    if getter in allowed_resultlog_callbacks:
+                        info = getattr(resultlog, getter)(info)
+                else:
+                    implicit_name = f"get_{name}"
+                    if implicit_name in allowed_resultlog_callbacks:
+                        info = getattr(resultlog, implicit_name)(info)
+                logged_infos[name][idx] = info
 
-    return {"ids": ids, **params, **inputs, **outputs}
+    return {"ids": ids, **params, **logged_infos}
 
 
 def export(config: dict, pipeline_def, output_path):
knowledgehub/contribs/promptui/logs.py (new file, +16)
@@ -0,0 +1,16 @@
+class ResultLog:
+    """Callback getter to get the desired log result
+
+    The callback resolution will be as follow:
+        1. Explicit string name
+        2. Implicitly by: `get_<name>`
+        3. Pass through
+    """
+
+    @staticmethod
+    def _get_input(obj):
+        return obj["input"]
+
+    @staticmethod
+    def _get_output(obj):
+        return obj["output"]
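A hypothetical subclass showing the resolution order the docstring describes; export.py above looks it up on the pipeline class through a _promptui_resultlog attribute:

from kotaemon.contribs.promptui.logs import ResultLog


class MyResultLog(ResultLog):
    @staticmethod
    def get_answer(obj):
        # implicit: a log column named "answer" with no "getter" resolves to get_answer
        return obj["output"]

    @staticmethod
    def strip_input(obj):
        # explicit: referenced from the config as {"step": ".", "getter": "strip_input"}
        return str(obj["input"]).strip()

# then, on the pipeline class: _promptui_resultlog = MyResultLog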
knowledgehub/contribs/promptui/themes.py (new file, +95)
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+from typing import Iterable
+
+from gradio.themes.base import Base
+from gradio.themes.utils import colors, fonts, sizes
+
+
+class John(Base):
+    def __init__(
+        self,
+        *,
+        primary_hue: colors.Color | str = colors.neutral,
+        secondary_hue: colors.Color | str = colors.neutral,
+        neutral_hue: colors.Color | str = colors.neutral,
+        spacing_size: sizes.Size | str = sizes.spacing_lg,
+        radius_size: sizes.Size | str = sizes.radius_none,
+        text_size: sizes.Size | str = sizes.text_md,
+        font: fonts.Font
+        | str
+        | Iterable[fonts.Font | str] = (
+            fonts.GoogleFont("Quicksand"),
+            "ui-sans-serif",
+            "system-ui",
+            "sans-serif",
+        ),
+        font_mono: fonts.Font
+        | str
+        | Iterable[fonts.Font | str] = (
+            fonts.GoogleFont("IBM Plex Mono"),
+            "ui-monospace",
+            "Consolas",
+            "monospace",
+        ),
+    ):
+        super().__init__(
+            primary_hue=primary_hue,
+            secondary_hue=secondary_hue,
+            neutral_hue=neutral_hue,
+            spacing_size=spacing_size,
+            radius_size=radius_size,
+            text_size=text_size,
+            font=font,
+            font_mono=font_mono,
+        )
+        self.name = "monochrome"
+        super().set(
+            # Colors
+            slider_color="*neutral_900",
+            slider_color_dark="*neutral_500",
+            body_text_color="*neutral_900",
+            block_label_text_color="*body_text_color",
+            block_title_text_color="*body_text_color",
+            body_text_color_subdued="*neutral_700",
+            background_fill_primary_dark="*neutral_900",
+            background_fill_secondary_dark="*neutral_800",
+            block_background_fill_dark="*neutral_800",
+            input_background_fill_dark="*neutral_700",
+            # Button Colors
+            button_primary_background_fill="*neutral_900",
+            button_primary_background_fill_hover="*neutral_700",
+            button_primary_text_color="white",
+            button_primary_background_fill_dark="*neutral_600",
+            button_primary_background_fill_hover_dark="*neutral_600",
+            button_primary_text_color_dark="white",
+            button_secondary_background_fill=(
+                "linear-gradient(to bottom right, *neutral_100, *neutral_200)"
+            ),
+            button_secondary_background_fill_hover=(
+                "linear-gradient(to bottom right, *neutral_100, *neutral_100)"
+            ),
+            button_secondary_background_fill_dark=(
+                "linear-gradient(to bottom right, *neutral_600, *neutral_700)"
+            ),
+            button_secondary_background_fill_hover_dark=(
+                "linear-gradient(to bottom right, *neutral_600, *neutral_600)"
+            ),
+            button_cancel_background_fill="*button_primary_background_fill",
+            button_cancel_background_fill_hover="*button_primary_background_fill_hover",
+            button_cancel_text_color="*button_primary_text_color",
+            # Padding
+            checkbox_label_padding="*spacing_md",
+            button_large_padding="*spacing_lg",
+            button_small_padding="*spacing_sm",
+            # Borders
+            block_border_width="0px",
+            block_border_width_dark="1px",
+            shadow_drop_lg="0 1px 4px 0 rgb(0 0 0 / 0.1)",
+            block_shadow="*shadow_drop_lg",
+            block_shadow_dark="none",
+            # Block Labels
+            block_title_text_weight="600",
+            block_label_text_weight="600",
+            block_label_text_size="*text_md",
+        )
knowledgehub/contribs/promptui/ui/__init__.py (new file, +45)
@@ -0,0 +1,45 @@
+from typing import Union
+
+import gradio as gr
+import yaml
+from theflow.utils.modules import import_dotted_string
+
+from ..themes import John
+from .chat import build_chat_ui
+from .pipeline import build_pipeline_ui
+
+
+def build_from_dict(config: Union[str, dict]):
+    """Build a full UI from YAML config file"""
+
+    if isinstance(config, str):
+        with open(config) as f:
+            config_dict: dict = yaml.safe_load(f)
+    elif isinstance(config, dict):
+        config_dict = config
+    else:
+        raise ValueError(
+            f"config must be either a yaml path or a dict, got {type(config)}"
+        )
+
+    demos = []
+    for key, value in config_dict.items():
+        pipeline_def = import_dotted_string(key, safe=False)
+        if value["ui-type"] == "chat":
+            demos.append(build_chat_ui(value, pipeline_def))
+        else:
+            demos.append(build_pipeline_ui(value, pipeline_def))
+    if len(demos) == 1:
+        demo = demos[0]
+    else:
+        demo = gr.TabbedInterface(
+            demos,
+            tab_names=list(config_dict.keys()),
+            title="PromptUI from kotaemon",
+            analytics_enabled=False,
+            theme=John(),
+        )
+
+    demo.queue()
+
+    return demo
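Usage sketch: because build_from_dict accepts a parsed dict as well as a YAML path, a UI can be launched without a config file (the dotted path below is hypothetical):

from kotaemon.contribs.promptui.ui import build_from_dict

config = {
    "mypkg.MyPipeline": {  # hypothetical pipeline, importable via dotted string
        "ui-type": "simple",
        "params": {},
        "inputs": {"text": {"component": "text"}},
        "outputs": [{"step": ".", "getter": "_get_output", "component": "text"}],
        "logs": {},
    }
}
demo = build_from_dict(config)
demo.launch(server_port=7860)  # the `kh promptui run` command drives this same object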
knowledgehub/contribs/promptui/ui/chat.py (new file, +282)
@@ -0,0 +1,282 @@
+import pickle
+from datetime import datetime
+from pathlib import Path
+
+import gradio as gr
+from theflow.storage import storage
+
+from kotaemon.chatbot import ChatConversation
+from kotaemon.contribs.promptui.base import get_component
+from kotaemon.contribs.promptui.export import export
+
+USAGE_INSTRUCTION = """## How to use:
+
+1. Set the desired parameters.
+2. Click "New chat" to start a chat session with the supplied parameters. This
+   set of parameters will persist until the end of the chat session. During an
+   ongoing chat session, changing the parameters will not take any effect.
+3. Chat and interact with the chat bot on the right panel. You can add any
+   additional input (if any), and they will be supplied to the chatbot.
+4. During chat, the log of the chat will show up in the "Output" tabs. This is
+   empty by default, so if you want to show the log here, tell the AI developers
+   to configure the UI settings.
+5. When finishing chat, select your preference in the radio box. Click "End chat".
+   This will save the chat log and the preference to disk.
+6. To compare the result of different run, click "Export" to get an Excel
+   spreadsheet summary of different run.
+
+## Support:
+
+In case of errors, you can:
+
+- PromptUI instruction:
+  https://github.com/Cinnamon/kotaemon/wiki/Utilities#prompt-engineering-ui
+- Create bug fix and make PR at: https://github.com/Cinnamon/kotaemon
+- Ping any of @john @tadashi @ian @jacky in Slack channel #llm-productization
+
+## Contribute:
+
+- Follow installation at: https://github.com/Cinnamon/kotaemon/
+"""
+
+
+def construct_chat_ui(
+    config, func_new_chat, func_chat, func_end_chat, func_export_to_excel
+) -> gr.Blocks:
+    """Construct the prompt engineering UI for chat
+
+    Args:
+        config: the UI config
+        func_new_chat: the function for starting a new chat session
+        func_chat: the function for chatting interaction
+        func_end_chat: the function for ending and saving the chat
+        func_export_to_excel: the function to export the logs to excel
+
+    Returns:
+        the UI object
+    """
+    inputs, outputs, params = [], [], []
+    for name, component_def in config.get("inputs", {}).items():
+        if "params" not in component_def:
+            component_def["params"] = {}
+        component_def["params"]["interactive"] = True
+        component = get_component(component_def)
+        if hasattr(component, "label") and not component.label:  # type: ignore
+            component.label = name  # type: ignore
+
+        inputs.append(component)
+
+    for name, component_def in config.get("params", {}).items():
+        if "params" not in component_def:
+            component_def["params"] = {}
+        component_def["params"]["interactive"] = True
+        component = get_component(component_def)
+        if hasattr(component, "label") and not component.label:  # type: ignore
+            component.label = name  # type: ignore
+
+        params.append(component)
+
+    for idx, component_def in enumerate(config.get("outputs", [])):
+        if "params" not in component_def:
+            component_def["params"] = {}
+        component_def["params"]["interactive"] = False
+        component = get_component(component_def)
+        if hasattr(component, "label") and not component.label:  # type: ignore
+            component.label = f"Output {idx}"  # type: ignore
+
+        outputs.append(component)
+
+    sess = gr.State(value=None)
+    chatbot = gr.Chatbot(label="Chatbot")
+    chat = gr.ChatInterface(func_chat, chatbot=chatbot, additional_inputs=[sess])
+    param_state = gr.Textbox(interactive=False)
+
+    with gr.Blocks(analytics_enabled=False, title="Welcome to PromptUI") as demo:
+        sess.render()
+        with gr.Accordion(label="HOW TO", open=False):
+            gr.Markdown(USAGE_INSTRUCTION)
+        with gr.Row():
+            run_btn = gr.Button("New chat")
+            run_btn.click(
+                func_new_chat,
+                inputs=params,
+                outputs=[
+                    chat.chatbot,
+                    chat.chatbot_state,
+                    chat.saved_input,
+                    param_state,
+                    sess,
+                ],
+            )
+            with gr.Accordion(label="End chat", open=False):
+                likes = gr.Radio(["like", "dislike", "neutral"], value="neutral")
+                save_log = gr.Checkbox(
+                    value=True,
+                    label="Save log",
+                    info="If saved, log can be exported later",
+                    show_label=True,
+                )
+                end_btn = gr.Button("End chat")
+                end_btn.click(
+                    func_end_chat,
+                    inputs=[likes, save_log, sess],
+                    outputs=[param_state, sess],
+                )
+            with gr.Accordion(label="Export", open=False):
+                exported_file = gr.File(
+                    label="Output file", show_label=True, height=100
+                )
+                export_btn = gr.Button("Export")
+                export_btn.click(
+                    func_export_to_excel, inputs=None, outputs=exported_file
+                )
+
+        with gr.Row():
+            with gr.Column():
+                with gr.Tab("Params"):
+                    for component in params:
+                        component.render()
+                    with gr.Accordion(label="Session state", open=False):
+                        param_state.render()
+
+                with gr.Tab("Outputs"):
+                    for component in outputs:
+                        component.render()
+            with gr.Column():
+                chat.render()
+
+    return demo.queue()
+
+
+def build_chat_ui(config, pipeline_def):
+    """Build the chat UI
+
+    Args:
+        config: the UI config
+        pipeline_def: the pipeline definition
+
+    Returns:
+        the UI object
+    """
+    output_dir: Path = Path(storage.url(pipeline_def().config.store_result))
+    exported_dir = output_dir.parent / "exported"
+    exported_dir.mkdir(parents=True, exist_ok=True)
+
+    def new_chat(*args):
+        """Start a new chat function
+
+        Args:
+            *args: the pipeline init params
+
+        Returns:
+            new empty states
+        """
+        gr.Info("Starting new session...")
+        param_dicts = {
+            name: value for name, value in zip(config["params"].keys(), args)
+        }
+        for key in param_dicts.keys():
+            if config["params"][key].get("component").lower() == "file":
+                param_dicts[key] = param_dicts[key].name
+
+        # TODO: currently hard-code as ChatConversation
+        pipeline = pipeline_def()
+        session = ChatConversation(bot=pipeline)
+        session.set(param_dicts)
+        session.start_session()
+
+        param_state_str = "\n".join(
+            f"- {name}: {value}" for name, value in param_dicts.items()
+        )
+
+        gr.Info("New chat session started.")
+        return [], [], None, param_state_str, session
+
+    def chat(message, history, session, *args):
+        """The chat interface
+
+        # TODO: wrap the input and output of this chat function so that it
+        work with more types of chat conversation than simple text
+
+        Args:
+            message: the message from the user
+            history: the gradio history of the chat
+            session: the chat object session
+            *args: the additional inputs
+
+        Returns:
+            the response from the chatbot
+        """
+        if session is None:
+            raise gr.Error(
+                "No active chat session. Please set the params and click New chat"
+            )
+
+        return session(message).content
+
+    def end_chat(preference: str, save_log: bool, session):
+        """End the chat session
+
+        Args:
+            preference: the preference of the user
+            save_log: whether to save the result
+            session: the chat object session
+
+        Returns:
+            the new empty state
+        """
+        gr.Info("Ending session...")
+        session.end_session()
+        output_dir: Path = (
+            Path(storage.url(session.config.store_result)) / session.last_run.id()
+        )
+
+        if not save_log:
+            if output_dir.exists():
+                import shutil
+
+                shutil.rmtree(output_dir)
+
+            session = None
+            param_state = ""
+            gr.Info("End session without saving log.")
+            return param_state, session
+
+        # add preference result to progress
+        with (output_dir / "progress.pkl").open("rb") as fi:
+            progress = pickle.load(fi)
+            progress["preference"] = preference
+        with (output_dir / "progress.pkl").open("wb") as fo:
+            pickle.dump(progress, fo)
+
+        # get the original params
+        param_dicts = {name: session.getx(name) for name in config["params"].keys()}
+        with (output_dir / "params.pkl").open("wb") as fo:
+            pickle.dump(param_dicts, fo)
+
+        session = None
+        param_state = ""
+        gr.Info("End session and save log.")
+        return param_state, session
+
+    def export_func():
+        name = (
+            f"{pipeline_def.__module__}.{pipeline_def.__name__}_{datetime.now()}.xlsx"
+        )
+        path = str(exported_dir / name)
+        gr.Info(f"Begin exporting {name}...")
+        try:
+            export(config=config, pipeline_def=pipeline_def, output_path=path)
+        except Exception as e:
+            raise gr.Error(f"Failed to export. Please contact project's AIR: {e}")
+        gr.Info(f"Exported {name}. Please go to the `Exported file` tab to download")
+        return path
+
+    demo = construct_chat_ui(
+        config=config,
+        func_new_chat=new_chat,
+        func_chat=chat,
+        func_end_chat=end_chat,
+        func_export_to_excel=export_func,
+    )
+    return demo
@@ -1,48 +1,46 @@
 import pickle
+import time
 from datetime import datetime
 from pathlib import Path
-from typing import Union
+from typing import Any, Dict
 
 import gradio as gr
-import yaml
+import pandas as pd
 from theflow.storage import storage
-from theflow.utils.modules import import_dotted_string
 
-from kotaemon.contribs.promptui.base import COMPONENTS_CLASS, SUPPORTED_COMPONENTS
+from kotaemon.contribs.promptui.base import get_component
 from kotaemon.contribs.promptui.export import export
+from ..logs import ResultLog
 
-USAGE_INSTRUCTION = """In case of errors, you can:
+USAGE_INSTRUCTION = """## How to use:
+
+1. Set the desired parameters.
+2. Set the desired inputs.
+3. Click "Run" to execute the pipeline with the supplied parameters and inputs
+4. The pipeline output will show up in the output panel.
+5. Repeat from step 1.
+6. To compare the result of different run, click "Export" to get an Excel
+spreadsheet summary of different run.
+
+## Support:
+
+In case of errors, you can:
 
 - PromptUI instruction:
 https://github.com/Cinnamon/kotaemon/wiki/Utilities#prompt-engineering-ui
 - Create bug fix and make PR at: https://github.com/Cinnamon/kotaemon
-- Ping any of @john @tadashi @ian @jacky in Slack channel #llm-productization"""
+- Ping any of @john @tadashi @ian @jacky in Slack channel #llm-productization
+
+## Contribute:
+
+- Follow installation at: https://github.com/Cinnamon/kotaemon/
+"""
 
 
-def get_component(component_def: dict) -> gr.components.Component:
-    """Get the component based on component definition"""
-    component_cls = None
-
-    if "component" in component_def:
-        component = component_def["component"]
-        if component not in SUPPORTED_COMPONENTS:
-            raise ValueError(
-                f"Unsupported UI component: {component}. "
-                f"Must be one of {SUPPORTED_COMPONENTS}"
-            )
-
-        component_cls = COMPONENTS_CLASS[component]
-    else:
-        raise ValueError(
-            f"Cannot decide the component from {component_def}. "
-            "Please specify `component` with 1 of the following "
-            f"values: {SUPPORTED_COMPONENTS}"
-        )
-
-    return component_cls(**component_def.get("params", {}))
-
-
-def construct_ui(config, func_run, func_export) -> gr.Blocks:
+def construct_pipeline_ui(
+    config, func_run, func_save, func_load_params, func_activate_params, func_export
+) -> gr.Blocks:
     """Create UI from config file. Execute the UI from config file
 
     - Can do now: Log from stdout to UI
@@ -76,16 +74,29 @@ def construct_ui(config, func_run, func_export) -> gr.Blocks:
         component_def["params"]["interactive"] = False
         component = get_component(component_def)
         if hasattr(component, "label") and not component.label:  # type: ignore
-            component.label = f"Output {idx}"
+            component.label = f"Output {idx}"  # type: ignore
 
         outputs.append(component)
 
     exported_file = gr.File(label="Output file", show_label=True)
+    history_dataframe = gr.DataFrame(wrap=True)
 
     temp = gr.Tab
     with gr.Blocks(analytics_enabled=False, title="Welcome to PromptUI") as demo:
-        with gr.Accordion(label="Usage", open=False):
+        with gr.Accordion(label="HOW TO", open=False):
             gr.Markdown(USAGE_INSTRUCTION)
+        with gr.Accordion(label="Params History", open=False):
+            with gr.Row():
+                save_btn = gr.Button("Save params")
+                save_btn.click(func_save, inputs=params, outputs=history_dataframe)
+                load_params_btn = gr.Button("Reload params")
+                load_params_btn.click(
+                    func_load_params, inputs=None, outputs=history_dataframe
+                )
+            history_dataframe.render()
+            history_dataframe.select(
+                func_activate_params, inputs=params, outputs=params
+            )
         with gr.Row():
             run_btn = gr.Button("Run")
             run_btn.click(func_run, inputs=inputs + params, outputs=outputs)
@@ -95,12 +106,12 @@ def construct_ui(config, func_run, func_export) -> gr.Blocks:
             export_btn.click(func_export, inputs=None, outputs=exported_file)
         with gr.Row():
             with gr.Column():
-                with temp("Inputs"):
-                    for component in inputs:
-                        component.render()
                 with temp("Params"):
                     for component in params:
                         component.render()
+                with temp("Inputs"):
+                    for component in inputs:
+                        component.render()
             with gr.Column():
                 with temp("Outputs"):
                     for component in outputs:
@@ -111,6 +122,23 @@ def construct_ui(config, func_run, func_export) -> gr.Blocks:
     return demo
 
 
+def load_saved_params(path: str) -> Dict:
+    """Load the saved params from path to a dataframe"""
+    # get all pickle files
+    files = list(sorted(Path(path).glob("*.pkl")))
+    data: Dict[str, Any] = {"_id": [None] * len(files)}
+    for idx, each_file in enumerate(files):
+        with open(each_file, "rb") as f:
+            each_data = pickle.load(f)
+        data["_id"][idx] = Path(each_file).stem
+        for key, value in each_data.items():
+            if key not in data:
+                data[key] = [None] * len(files)
+            data[key][idx] = value
+
+    return data
+
+
 def build_pipeline_ui(config: dict, pipeline_def):
     """Build a tab from config file"""
     inputs_name = list(config.get("inputs", {}).keys())
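Illustration of the round trip (hypothetical values): save_func below pickles one params dict per file under save_dir, and load_saved_params collates them column-wise with an "_id" column derived from the file stem:

import pickle
from pathlib import Path

save_dir = Path("/tmp/promptui-saved")  # stand-in for the real save_dir
save_dir.mkdir(parents=True, exist_ok=True)
(save_dir / "1700000000.pkl").write_bytes(pickle.dumps({"temperature": 0.2}))
(save_dir / "1700000100.pkl").write_bytes(pickle.dumps({"temperature": 0.9}))

# load_saved_params(str(save_dir)) then returns:
# {"_id": ["1700000000", "1700000100"], "temperature": [0.2, 0.9]}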
@@ -121,6 +149,16 @@ def build_pipeline_ui(config: dict, pipeline_def):
     exported_dir = output_dir.parent / "exported"
     exported_dir.mkdir(parents=True, exist_ok=True)
 
+    save_dir = (
+        output_dir.parent
+        / "saved"
+        / f"{pipeline_def.__module__}.{pipeline_def.__name__}"
+    )
+    save_dir.mkdir(parents=True, exist_ok=True)
+
+    resultlog = getattr(pipeline_def, "_promptui_resultlog", ResultLog)
+    allowed_resultlog_callbacks = {i for i in dir(resultlog) if not i.startswith("__")}
+
     def run_func(*args):
         inputs = {
             name: value for name, value in zip(inputs_name, args[: len(inputs_name)])
@@ -142,11 +180,50 @@ def build_pipeline_ui(config: dict, pipeline_def):
         outputs = []
         for output_def in outputs_def:
             output = pipeline.last_run.logs(output_def["step"])
-            if "item" in output_def:
-                output = output[output_def["item"]]
+            getter = output_def.get("getter", None)
+            if getter and getter in allowed_resultlog_callbacks:
+                output = getattr(resultlog, getter)(output)
             outputs.append(output)
         if len(outputs_def) == 1:
             return outputs[0]
         return outputs
 
+    def save_func(*args):
+        params = {name: value for name, value in zip(params_name, args)}
+        filename = save_dir / f"{int(time.time())}.pkl"
+        with open(filename, "wb") as f:
+            pickle.dump(params, f)
+        gr.Info("Params saved")
+
+        data = load_saved_params(str(save_dir))
+        return pd.DataFrame(data)
+
+    def load_params_func():
+        data = load_saved_params(str(save_dir))
+        return pd.DataFrame(data)
+
+    def activate_params_func(ev: gr.SelectData, *args):
+        data = load_saved_params(str(save_dir))
+        output_args = [each for each in args]
+        if ev.value is None:
+            gr.Info(f'Blank value: "{ev.value}". Skip')
+            return output_args
+
+        column = list(data.keys())[ev.index[1]]
+
+        if column not in params_name:
+            gr.Info(f'Column "{column}" not in params. Skip')
+            return output_args
+
+        value = data[column][ev.index[0]]
+        if value is None:
+            gr.Info(f'Blank value: "{ev.value}". Skip')
+            return output_args
+
+        output_args[params_name.index(column)] = value
+
+        return output_args
+
     def export_func():
         name = (
             f"{pipeline_def.__module__}.{pipeline_def.__name__}_{datetime.now()}.xlsx"
@@ -160,31 +237,6 @@ def build_pipeline_ui(config: dict, pipeline_def):
         gr.Info(f"Exported {name}. Please go to the `Exported file` tab to download")
         return path
 
-    return construct_ui(config, run_func, export_func)
-
-
-def build_from_dict(config: Union[str, dict]):
-    """Build a full UI from YAML config file"""
-
-    if isinstance(config, str):
-        with open(config) as f:
-            config_dict: dict = yaml.safe_load(f)
-    elif isinstance(config, dict):
-        config_dict = config
-    else:
-        raise ValueError(
-            f"config must be either a yaml path or a dict, got {type(config)}"
-        )
-
-    demos = []
-    for key, value in config_dict.items():
-        pipeline_def = import_dotted_string(key, safe=False)
-        demos.append(build_pipeline_ui(value, pipeline_def))
-    if len(demos) == 1:
-        demo = demos[0]
-    else:
-        demo = gr.TabbedInterface(demos, list(config_dict.keys()))
-
-    demo.queue()
-
-    return demo
+    return construct_pipeline_ui(
+        config, run_func, save_func, load_params_func, activate_params_func, export_func
+    )
@@ -0,0 +1,13 @@
+from langchain.schema.messages import AIMessage, SystemMessage
+
+from .chats import AzureChatOpenAI, ChatLLM
+from .chats.base import BaseMessage, HumanMessage
+
+__all__ = [
+    "ChatLLM",
+    "AzureChatOpenAI",
+    "BaseMessage",
+    "HumanMessage",
+    "AIMessage",
+    "SystemMessage",
+]
@@ -0,0 +1,4 @@
+from .base import ChatLLM
+from .openai import AzureChatOpenAI
+
+__all__ = ["ChatLLM", "AzureChatOpenAI"]