Skip to content
Snippets Groups Projects
Commit e8c6c2df authored by Julian Rasch's avatar Julian Rasch
Browse files

Merge branch 'dev' into 'dev'

Dev

See merge request !4
parents b268e3ed f7166c60
No related branches found
No related tags found
2 merge requests!6Finalized Jupyterlab for the sprint,!4Dev
Pipeline #186926 passed
......@@ -9,13 +9,12 @@ RUN conda env update -q -f /tmp/environment.yml && \
conda env export -n "root" && \
jupyter lab build
# Make sure pip is current before installing the local packages below.
RUN pip3 install --upgrade pip
# Install the jupyter-server-proxy package that launches the Dash app.
COPY dash_proxy /tmp/dash_proxy/
RUN pip install /tmp/dash_proxy/
# Install the LLM helper utilities used by the Dash app.
COPY llm_utils /llm_utils/
RUN pip install /llm_utils/
# Path to the runtime configuration; read via os.environ by the app code.
ENV CONFIG_PATH=/home/jovyan/config.txt
# Copy the Dash application and hand ownership to the notebook user.
COPY app /dash/app/
RUN chown -R jovyan /dash/app/
import os
from datetime import datetime
from dash import (
......@@ -10,7 +11,7 @@ from dash.dependencies import (
State
)
from llm_utils.client import ChatGPT
from llm_utils.client import ChatGPT, get_openai_client
def format_chat_messages(chat_history):
......@@ -24,8 +25,15 @@ def format_chat_messages(chat_history):
def register_callbacks(app: Dash):
chat_gpt = ChatGPT(model="gpt4")
model="gpt4"
client = get_openai_client(
model=model,
config_path=os.environ.get("CONFIG_PATH")
)
chat_gpt = ChatGPT(
client=client,
model="gpt4"
)
@app.callback(
[Output('chat-container', 'children'),
......
import sys
sys.path.append("/home/jovyan/")
import argparse
import logging
from urllib.parse import urlparse, urljoin
from dash import Dash
from jupyter_server.serverapp import list_running_servers
try:
    from my_layout import layout
    from my_callbacks import register_callbacks
except ModuleNotFoundError:
    # The layout and callbacks are user-provided modules; without them there
    # is nothing to serve, so do not let Dash start. Log the reason instead
    # of dying silently, and exit non-zero via sys.exit — the bare exit()
    # builtin is meant for interactive sessions only.
    logging.error(
        "my_layout.py / my_callbacks.py not found in /home/jovyan/ - not starting Dash."
    )
    sys.exit(1)
logging.basicConfig(level=logging.INFO)
# weird trick to find base_url for the jupyterlab
def find_jupyterlab_base_url():
    """Return the URL of the running Jupyterlab server on port 8888.

    Scans the servers reported by jupyter_server and returns the ``url``
    of the one bound to port 8888, or ``None`` when no such server exists.
    """
    for srv in list_running_servers():
        if srv["port"] == 8888:
            return srv["url"]
    return None
# get the correct port from proxy
# jupyter-server-proxy substitutes the port into the launch command
# (see setup_my_app_proxy), so it arrives here as a CLI argument.
parser = argparse.ArgumentParser()
parser.add_argument("--port", type=int)
args = parser.parse_args()
port: int = args.port
if not port:
    # Covers both a missing --port (None) and an explicit 0.
    raise ValueError(f"Port of proxy server for Dash not found in {args}.")
else:
    # NOTE(review): logging is configured at INFO level above, so these
    # logging.debug calls will not be emitted — confirm whether DEBUG
    # output was intended here.
    logging.debug(f"Dash app running on port {port}.")
base_url = find_jupyterlab_base_url()
if base_url is None:
    raise ValueError("Base URL of Jupyterlab could not be detected.")
logging.debug(f"Base URL: {base_url}")
# Dash must generate its asset/request URLs under the Jupyterlab proxy
# prefix (e.g. <base>/proxy/<port>/) to be reachable through the proxy.
proxy_base_path = urlparse(urljoin(base_url + "/", f"proxy/{port}/")).path
logging.debug(f"Proxy base path: {proxy_base_path}")
# define Dash app
app = Dash(
    name=__name__,
    requests_pathname_prefix=proxy_base_path
)
# define layout
app.layout = layout
# register all callback functions
register_callbacks(app=app)
# Run Dash app in the notebook
app.run(
    jupyter_mode="jupyterlab",
    port=port,
    host="0.0.0.0",
    debug=True
)
def setup_my_app_proxy():
    """jupyter-server-proxy entry point for the Dash app.

    Returns the proxy configuration dict: the command used to launch the
    app ('{port}' is substituted by jupyter-server-proxy at runtime),
    served in the current tab, with a launcher entry titled 'MyApp'.
    """
    launch_command = ["python", "/dash/app/my_app.py", "--port", "{port}"]
    return {
        "command": launch_command,
        "new_browser_tab": False,
        "launcher_entry": {"enabled": True, "title": "MyApp"},
    }
......@@ -5,11 +5,12 @@ setuptools.setup(
author_email="julian.rasch@fh-muenster.de",
description="A small module to run Dash inside a dockerized Jupyterlab.",
name="jupyter-dash-proxy",
py_modules=["dash_proxy"],
py_modules=["dash_proxy", "my_app_proxy"],
entry_points={
"jupyter_serverproxy_servers": [
# name = packagename:function_name
"Dash = dash_proxy:setup_dash_proxy",
"MyApp = my_app_proxy:setup_my_app_proxy"
]
},
install_requires=["jupyter-server-proxy==4.0.0"],
......
import os
import logging
from openai import AzureOpenAI
from dotenv import load_dotenv
from enum import Enum
# Load the Azure OpenAI connection settings from config.txt.
# NOTE: load_dotenv returns False when the file is missing — it does not
# raise — so the fallback to the current folder must be driven by the
# return value. The previous `except ValueError` handler was dead code.
found_dotenv = load_dotenv(
    "/home/jovyan/config.txt",
    override=True
)
if not found_dotenv:
    # logging.warn is a deprecated alias; logging.warning is the real API.
    logging.warning("Could not detect config.txt in /home/jovyan/. Searching in current folder ...")
    found_dotenv = load_dotenv(
        "config.txt",
        override=True
    )
if not found_dotenv:
    raise ValueError("Could not detect config.txt in /home/jovyan/.")
# Credentials and API version for the AzureOpenAI client, as provided
# by config.txt (None when a key is absent from the environment).
AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")
class OpenAIModels(Enum):
GPT_3 = "gpt3"
......@@ -33,13 +15,25 @@ class OpenAIModels(Enum):
return [member.value for member in cls]
def get_openai_client(model: str) -> AzureOpenAI:
def get_openai_client(
model: str,
config_path: str
) -> AzureOpenAI:
if not model in OpenAIModels.get_all_values():
raise ValueError(f"<model> needs to be one of {OpenAIModels.get_all_values()}.")
load_dotenv(
dotenv_path=config_path,
override=True
)
AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")
if any(p is None for p in (AZURE_OPENAI_API_KEY, AZURE_OPENAI_API_KEY, OPENAI_API_VERSION)):
raise ValueError(
f"""None of the following parameters can be none:
f"""None of the following parameters can be None:
AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
OPENAI_API_VERSION: {OPENAI_API_VERSION}
......@@ -56,9 +50,9 @@ def get_openai_client(model: str) -> AzureOpenAI:
class ChatGPT:
def __init__(self, model="gpt4"):
def __init__(self, client: AzureOpenAI, model: str):
self.model = model
self.client = get_openai_client(model=model)
self.client = client
self.messages = []
def chat_with_gpt(self, user_input: str):
......
......@@ -2,11 +2,14 @@ jupyter-server-proxy==4.0.0
jupyterlab-git==0.42.0
jupyter_server>=2.0
flake8
dash
dash-bootstrap-components
plotly
openai
rapidfuzz
nltk
python-dotenv
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment