diff --git a/Dockerfile b/Dockerfile
index 7454dc26a54ab0cbeff577c410125025d21203c8..535ee2b675b2afbe382e396a9f8442f8c62a8234 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,14 +9,12 @@ RUN conda env update -q -f /tmp/environment.yml && \
     conda env export -n "root" && \
     jupyter lab build 
 
-RUN pip3 install --upgrade pip
-RUN pip install jupyterlab_flake8
-
 COPY dash_proxy /tmp/dash_proxy/
 RUN pip install /tmp/dash_proxy/
 
 COPY llm_utils /llm_utils/
 RUN pip install /llm_utils/
+ENV CONFIG_PATH=/home/jovyan/config.txt
 
 COPY app /dash/app/
 RUN chown -R jovyan /dash/app/
diff --git a/app/callbacks.py b/app/callbacks.py
index 6076465849aca4147f3158bc3fe3d50db435cbfa..9a61c5cb2d483203df434aae8482552bac8dfc0c 100644
--- a/app/callbacks.py
+++ b/app/callbacks.py
@@ -1,3 +1,4 @@
+import os
 from datetime import datetime
 
 from dash import (
@@ -10,7 +11,7 @@ from dash.dependencies import (
     State
 )
 
-from llm_utils.client import ChatGPT
+from llm_utils.client import ChatGPT, get_openai_client
 
 
 def format_chat_messages(chat_history):
@@ -24,8 +25,15 @@ def format_chat_messages(chat_history):
 
 
 def register_callbacks(app: Dash):
-    
-    chat_gpt = ChatGPT(model="gpt4")
+    model="gpt4"
+    client = get_openai_client(
+        model=model,
+        config_path=os.environ.get("CONFIG_PATH")
+    )
+    chat_gpt = ChatGPT(
+        client=client,
+        model="gpt4"
+    )
     
     @app.callback(
         [Output('chat-container', 'children'),
diff --git a/llm_utils/src/llm_utils/client.py b/llm_utils/src/llm_utils/client.py
index 82ceeac15722b63c195615bcb18a5563f36d349d..1bca863281a30f6cca7798da46b09adbb125d0a7 100644
--- a/llm_utils/src/llm_utils/client.py
+++ b/llm_utils/src/llm_utils/client.py
@@ -1,27 +1,9 @@
 import os
-import logging
 from openai import AzureOpenAI
 from dotenv import load_dotenv
 
 from enum import Enum
 
-try:
-    found_dotenv = load_dotenv(
-        "/home/jovyan/config.txt",
-        override=True
-    )
-except ValueError:
-    logging.warn("Could not detect config.txt in /home/jovyan/. Searching in current folder ...")
-    found_dotenv = load_dotenv(
-        "config.txt",
-        override=True)
-
-if not found_dotenv: 
-    raise ValueError("Could not detect config.txt in /home/jovyan/.")
-
-AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
-AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
-OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")
 
 class OpenAIModels(Enum):
     GPT_3 = "gpt3"
@@ -33,13 +15,25 @@ class OpenAIModels(Enum):
         return [member.value for member in cls]
 
 
-def get_openai_client(model: str) -> AzureOpenAI:
+def get_openai_client(
+    model: str,
+    config_path: str
+    ) -> AzureOpenAI:
-    if not model in OpenAIModels.get_all_values():
+    if model not in OpenAIModels.get_all_values():
         raise ValueError(f"<model> needs to be one of {OpenAIModels.get_all_values()}.")
     
+    load_dotenv(
+        dotenv_path=config_path,
+        override=True
+    )
+
+    AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
+    AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
+    OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")
+    
-    if any(p is None for p in (AZURE_OPENAI_API_KEY, AZURE_OPENAI_API_KEY, OPENAI_API_VERSION)):
+    if any(p is None for p in (AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, OPENAI_API_VERSION)):
         raise ValueError(
-            f"""None of the following parameters can be none: 
+            f"""None of the following parameters can be None: 
             AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
-            AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
+            AZURE_OPENAI_ENDPOINT: {AZURE_OPENAI_ENDPOINT},
             OPENAI_API_VERSION: {OPENAI_API_VERSION}
@@ -56,9 +50,9 @@ def get_openai_client(model: str) -> AzureOpenAI:
 
 
 class ChatGPT:
-    def __init__(self, model="gpt4"):
+    def __init__(self, client: AzureOpenAI, model: str):
         self.model = model
-        self.client = get_openai_client(model=model)
+        self.client = client
         self.messages = []
 
     def chat_with_gpt(self, user_input: str):