From 783fe0589f236a48cdc30f92675620cdd4b62ff3 Mon Sep 17 00:00:00 2001
From: Julian <julian@edyoucated.org>
Date: Tue, 2 Apr 2024 17:30:38 +0200
Subject: [PATCH] removed flake8 due to errors, simplified client

---
 Dockerfile                        |  4 +---
 app/callbacks.py                  | 14 +++++++++---
 llm_utils/src/llm_utils/client.py | 38 +++++++++++++------------------
 3 files changed, 28 insertions(+), 28 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 7454dc2..535ee2b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,14 +9,12 @@ RUN conda env update -q -f /tmp/environment.yml && \
     conda env export -n "root" && \
     jupyter lab build
 
-RUN pip3 install --upgrade pip
-RUN pip install jupyterlab_flake8
-
 COPY dash_proxy /tmp/dash_proxy/
 RUN pip install /tmp/dash_proxy/
 
 COPY llm_utils /llm_utils/
 RUN pip install /llm_utils/
+ENV CONFIG_PATH=/home/jovyan/config.txt
 
 COPY app /dash/app/
 RUN chown -R jovyan /dash/app/
diff --git a/app/callbacks.py b/app/callbacks.py
index 6076465..9a61c5c 100644
--- a/app/callbacks.py
+++ b/app/callbacks.py
@@ -1,3 +1,4 @@
+import os
 from datetime import datetime
 
 from dash import (
@@ -10,7 +11,7 @@ from dash.dependencies import (
     State
 )
 
-from llm_utils.client import ChatGPT
+from llm_utils.client import ChatGPT, get_openai_client
 
 
 def format_chat_messages(chat_history):
@@ -24,8 +25,15 @@ def format_chat_messages(chat_history):
 
 
 def register_callbacks(app: Dash):
-
-    chat_gpt = ChatGPT(model="gpt4")
+    model="gpt4"
+    client = get_openai_client(
+        model=model,
+        config_path=os.environ.get("CONFIG_PATH")
+    )
+    chat_gpt = ChatGPT(
+        client=client,
+        model="gpt4"
+    )
 
     @app.callback(
         [Output('chat-container', 'children'),
diff --git a/llm_utils/src/llm_utils/client.py b/llm_utils/src/llm_utils/client.py
index 82ceeac..1bca863 100644
--- a/llm_utils/src/llm_utils/client.py
+++ b/llm_utils/src/llm_utils/client.py
@@ -1,27 +1,9 @@
 import os
-import logging
 
 from openai import AzureOpenAI
 from dotenv import load_dotenv
 from enum import Enum
 
-try:
-    found_dotenv = load_dotenv(
-        "/home/jovyan/config.txt",
-        override=True
-    )
-except ValueError:
-    logging.warn("Could not detect config.txt in /home/jovyan/. Searching in current folder ...")
-    found_dotenv = load_dotenv(
-        "config.txt",
-        override=True)
-
-if not found_dotenv:
-    raise ValueError("Could not detect config.txt in /home/jovyan/.")
-
-AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
-AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
-OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")
 
 class OpenAIModels(Enum):
     GPT_3 = "gpt3"
@@ -33,13 +15,25 @@ class OpenAIModels(Enum):
         return [member.value for member in cls]
 
 
-def get_openai_client(model: str) -> AzureOpenAI:
+def get_openai_client(
+        model: str,
+        config_path: str
+    ) -> AzureOpenAI:
     if not model in OpenAIModels.get_all_values():
         raise ValueError(f"<model> needs to be one of {OpenAIModels.get_all_values()}.")
 
+    load_dotenv(
+        dotenv_path=config_path,
+        override=True
+    )
+
+    AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
+    AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
+    OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")
+
     if any(p is None for p in (AZURE_OPENAI_API_KEY, AZURE_OPENAI_API_KEY, OPENAI_API_VERSION)):
         raise ValueError(
-            f"""None of the following parameters can be none:
+            f"""None of the following parameters can be None:
             AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
             AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
             OPENAI_API_VERSION: {OPENAI_API_VERSION}
@@ -56,9 +50,9 @@ def get_openai_client(model: str) -> AzureOpenAI:
 
 
 class ChatGPT:
-    def __init__(self, model="gpt4"):
+    def __init__(self, client: AzureOpenAI, model: str):
         self.model = model
-        self.client = get_openai_client(model=model)
+        self.client = client
         self.messages = []
 
     def chat_with_gpt(self, user_input: str):
-- 
GitLab
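
Usage sketch (illustrative, not part of the patch): after this change, get_openai_client
loads the Azure credentials from the dotenv file given by config_path instead of doing a
module-level load_dotenv, and ChatGPT receives an already-constructed client rather than
building one itself. A minimal sketch of the expected wiring, assuming CONFIG_PATH is set
as in the Dockerfile (ENV CONFIG_PATH=/home/jovyan/config.txt) and that config.txt defines
AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT and OPENAI_API_VERSION; all names mirror the
diff, only the example prompt is invented:

    import os

    from llm_utils.client import ChatGPT, get_openai_client

    # Build the AzureOpenAI client from the dotenv-style config file pointed to by
    # CONFIG_PATH (set in the image); os.environ.get returns None if it is unset.
    client = get_openai_client(
        model="gpt4",
        config_path=os.environ.get("CONFIG_PATH"),
    )

    # ChatGPT no longer constructs its own client; it is injected here.
    chat_gpt = ChatGPT(client=client, model="gpt4")

    # chat_with_gpt(user_input: str) is defined in client.py and is unchanged by this
    # patch; its return value is not shown in the diff, so it is not used here.
    chat_gpt.chat_with_gpt("Hello, can you hear me?")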