diff --git a/.gitignore b/.gitignore
index 6609e2b52a8167d7633d53087d93804b72ffd227..8103524e9cd7dfb6530505754b309345c7b7ecb8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,8 @@
 *.pyc
 *.txt
 config.txt
+
+run.sh
+build.sh
+run_my_app.sh
+run_streamlit_app.sh
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index a6280ceea78475872a2e6dd45474e28701b7cb55..3e4f96529ff490e360332f6f9512966bab8dcf3b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,26 +1,28 @@
-FROM jupyter/datascience-notebook:hub-3.1.1
+FROM quay.io/jupyter/datascience-notebook:hub-5.2.0
 
 USER root
 
-COPY requirements.txt environment.yml /tmp/
-RUN conda env update -q -f /tmp/environment.yml && \
-    /opt/conda/bin/pip install -r /tmp/requirements.txt && \
-    conda clean -y --all && \
-    conda env export -n "root" && \
-    jupyter lab build 
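+# install the Python dependencies with pip (the conda environment.yml step was dropped)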
+COPY requirements.txt .
+RUN pip install --no-cache-dir -r requirements.txt && rm requirements.txt
 
-COPY dash_proxy /tmp/dash_proxy/
-RUN pip install /tmp/dash_proxy/
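+# HOME_PATH is read by app/my_app.py to extend sys.path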
+ENV HOME_PATH=/home/jovyan/
 
+# add the server proxy for Streamlit
+COPY streamlit_proxy /tmp/streamlit_proxy/
+RUN pip install /tmp/streamlit_proxy/
+
+# install the llm_utils helpers for the OpenAI client
 COPY llm_utils /llm_utils/
 RUN pip install /llm_utils/
 ENV CONFIG_PATH=/home/jovyan/config.txt
 
-COPY app /dash/app/
-RUN chown -R jovyan /dash/app/
+# copy the apps into the container
+COPY app /streamlit/app/
+RUN chown -R jovyan /streamlit/app/
 
-# install some NLTK and spaCy data
+# install some NLTK data
 RUN python -m nltk.downloader stopwords
 RUN python -m nltk.downloader wordnet
 RUN python -m nltk.downloader punkt
-RUN python -m spacy download en_core_web_sm
+
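+# drop root privileges and switch back to the default notebook user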
+USER jovyan
\ No newline at end of file
diff --git a/app/app.py b/app/app.py
index b43ae3ac69e2d41ac5bf45446abf5d36aa5d7996..66b5c966800efb8e2b40f716147132c89243770d 100644
--- a/app/app.py
+++ b/app/app.py
@@ -1,65 +1,49 @@
-import sys 
-sys.path.append("/home/jovyan/")
+import os
+import streamlit as st
 
-import argparse
-import logging
+from llm_utils.client import get_openai_client
 
-from urllib.parse import urlparse, urljoin
 
-from dash import Dash
-
-from jupyter_server.serverapp import list_running_servers
-
-from layout import layout
-from callbacks import register_callbacks
-
-logging.basicConfig(level=logging.INFO)
-
-# weird trick to find base_url for the jupyterlab
-def find_jupyterlab_base_url():
-    servers = list_running_servers()
-    for server in servers:
-        if server["port"] == 8888:
-            return server['url']
-    return None
-
-
-# get the correct port from proxy
-parser = argparse.ArgumentParser()
-parser.add_argument("--port", type=int)
-args = parser.parse_args()
-port: int = args.port
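+# create the OpenAI client; CONFIG_PATH is set in the Dockerfile (/home/jovyan/config.txt)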
+MODEL = "gpt-4o"
+client = get_openai_client(
+    model=MODEL,
+    config_path=os.environ.get("CONFIG_PATH")
+)
 
-if not port:
-    raise ValueError(f"Port of proxy server for Dash not found in {args}.")
-else: 
-    logging.debug(f"Dash app running on port {port}.")
 
 
-base_url = find_jupyterlab_base_url()
-if base_url is None:
-    raise ValueError("Base URL of Jupyterlab could not be detected.")
-logging.debug(f"Base URL: {base_url}")
+# STREAMLIT APP
 
-proxy_base_path = urlparse(urljoin(base_url + "/", f"proxy/{port}/")).path
-logging.debug(f"Proxy base path: {proxy_base_path}")
+st.title("ChatGPT in Streamlit")
 
-# define Dash app
-app = Dash(
-    name=__name__, 
-    requests_pathname_prefix=proxy_base_path
-)
 
-# define layout
-app.layout = layout
-
-# register all callback functions
-register_callbacks(app=app)
-
-# Run Dash app in the notebook
-app.run(
-    jupyter_mode="jupyterlab", 
-    port=port, 
-    host="0.0.0.0",
-    debug=True
-)
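+# keep the model name and chat history in session_state so they persist across Streamlit reruns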
+if "openai_model" not in st.session_state:
+    st.session_state["openai_model"] = MODEL
+
+if "messages" not in st.session_state:
+    st.session_state.messages = []
+
+for message in st.session_state.messages:
+    with st.chat_message(message["role"]):
+        st.markdown(message["content"])
+
+if prompt := st.chat_input("What is up?"):
+    st.session_state.messages.append({"role": "user", "content": prompt})
+    with st.chat_message("user"):
+        st.markdown(prompt)
+
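+    # stream the assistant reply and capture the full text returned by st.write_stream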
+    with st.chat_message("assistant"):
+        stream = client.chat.completions.create(
+            model=st.session_state["openai_model"],
+            messages=[
+                {"role": m["role"], "content": m["content"]}
+                for m in st.session_state.messages
+            ],
+            stream=True,
+        )
+        response = st.write_stream(stream)
+    st.session_state.messages.append({"role": "assistant", "content": response})
\ No newline at end of file
diff --git a/app/callbacks.py b/app/callbacks.py
deleted file mode 100644
index 9a61c5cb2d483203df434aae8482552bac8dfc0c..0000000000000000000000000000000000000000
--- a/app/callbacks.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import os
-from datetime import datetime
-
-from dash import (
-    html, 
-    Dash
-)
-from dash.dependencies import (
-    Input, 
-    Output, 
-    State
-)
-
-from llm_utils.client import ChatGPT, get_openai_client
-
-
-def format_chat_messages(chat_history):
-    chat_messages = []
-    for message in chat_history:
-        chat_messages.append(html.Div([
-            html.P(f'{message["sender"]}: {message["message"]}'),
-            html.P(f'Sent at: {message["timestamp"]}')
-        ]))
-    return chat_messages
-
-
-def register_callbacks(app: Dash):
-    model="gpt4"
-    client = get_openai_client(
-        model=model,
-        config_path=os.environ.get("CONFIG_PATH")
-    )
-    chat_gpt = ChatGPT(
-        client=client,
-        model="gpt4"
-    )
-    
-    @app.callback(
-        [Output('chat-container', 'children'),
-        Output('chat-history', 'data')],
-        [Input('send-button', 'n_clicks')],
-        [State('user-input', 'value'),
-        State('chat-history', 'data')]
-    )
-    def update_chat(n_clicks, input_value, chat_history):
-        if chat_history is None:
-            chat_history = []
-        
-        if n_clicks > 0 and input_value:
-            chat_history.append({
-                'sender': 'User',
-                'message': input_value,
-                'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-            })
-            
-            response = chat_gpt.chat_with_gpt(input_value)
-            
-            # Add response to chat history
-            chat_history.append({
-                'sender': 'Language Model',
-                'message': response,
-                'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-            })
-        
-        return format_chat_messages(chat_history), chat_history
diff --git a/app/layout.py b/app/layout.py
deleted file mode 100644
index 4d82a24710224074e9123450133daf5eefd3aac2..0000000000000000000000000000000000000000
--- a/app/layout.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from dash import (
-    html, 
-    dcc
-)
-
-layout = html.Div([
-    dcc.Store(
-        id='chat-history', 
-        data=[]
-    ),
-    html.H1(
-        "Simple Chat App", 
-        style={'text-align': 'center'}
-    ),
-    html.Div(
-        id='chat-container', 
-        style={'overflowY': 'scroll', 'height': '70vh', 'padding': '10px'}
-    ),
-    html.Div([
-        dcc.Input(
-            id='user-input', 
-            type='text', 
-            placeholder='Type your message...', 
-            debounce=True
-        ),
-        html.Button(
-            'Send', 
-            id='send-button', 
-            n_clicks=0
-        )
-    ], style={
-        'display': 'flex', 
-        'alignItems': 'center', 
-        'justifyContent': 'center', 
-        'position': 'fixed', 
-        'bottom': 0, 
-        'width': '100%', 
-        'padding': '10px'
-    })
-], style={'position': 'relative'})
diff --git a/app/my_app.py b/app/my_app.py
index db083be592f45c76bd6c05d3f2783614b84cfcb0..7d203b4f9b4017f4aa45bfd52d55cdca6a64c2a5 100644
--- a/app/my_app.py
+++ b/app/my_app.py
@@ -1,70 +1,15 @@
+import os
 import sys 
-sys.path.append("/home/jovyan/")
 
-import argparse
-import logging
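+# HOME_PATH is set in the Dockerfile (ENV HOME_PATH=/home/jovyan/) so user modules placed there can be imported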
+HOME_PATH = os.environ.get("HOME_PATH")
+sys.path.append(HOME_PATH)
 
-from urllib.parse import urlparse, urljoin
-
-from dash import Dash
-
-from jupyter_server.serverapp import list_running_servers
+print(HOME_PATH)
 
 try: 
-    from my_layout import layout
-    from my_callbacks import register_callbacks
-except ModuleNotFoundError:
-    # do not let Dash start
-    exit()
-
-
-logging.basicConfig(level=logging.INFO)
-
-# weird trick to find base_url for the jupyterlab
-def find_jupyterlab_base_url():
-    servers = list_running_servers()
-    for server in servers:
-        if server["port"] == 8888:
-            return server['url']
-    return None
-
-
-# get the correct port from proxy
-parser = argparse.ArgumentParser()
-parser.add_argument("--port", type=int)
-args = parser.parse_args()
-port: int = args.port
+    import streamlit_app # this runs the app
 
-if not port:
-    raise ValueError(f"Port of proxy server for Dash not found in {args}.")
-else: 
-    logging.debug(f"Dash app running on port {port}.")
-
-
-base_url = find_jupyterlab_base_url()
-if base_url is None:
-    raise ValueError("Base URL of Jupyterlab could not be detected.")
-logging.debug(f"Base URL: {base_url}")
-
-proxy_base_path = urlparse(urljoin(base_url + "/", f"proxy/{port}/")).path
-logging.debug(f"Proxy base path: {proxy_base_path}")
-
-# define Dash app
-app = Dash(
-    name=__name__, 
-    requests_pathname_prefix=proxy_base_path
-)
-
-# define layout
-app.layout = layout
-
-# register all callback functions
-register_callbacks(app=app)
-
-# Run Dash app in the notebook
-app.run(
-    jupyter_mode="jupyterlab", 
-    port=port, 
-    host="0.0.0.0",
-    debug=True
-)
+except ModuleNotFoundError as e:
+    # do not let the app start if the module is missing
+    print(e)
+    sys.exit()
diff --git a/app/streamlit-favicon.svg b/app/streamlit-favicon.svg
new file mode 100644
index 0000000000000000000000000000000000000000..f676ed8d5b685bb309b0b058e753d91afd3d2e04
--- /dev/null
+++ b/app/streamlit-favicon.svg
@@ -0,0 +1,5 @@
+<svg width="512" height="512" viewBox="0 0 512 512" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M255.968 288.494L166.211 241.067L10.4062 158.753C10.2639 158.611 9.97951 158.611 9.83728 158.611C4.14838 155.909 -1.68275 161.596 0.450591 167.283L79.8393 369.685L79.8535 369.728C79.9388 369.927 80.0099 370.126 80.0953 370.325C83.3522 377.874 90.4633 382.537 98.2002 384.371C98.8544 384.513 99.3225 384.643 100.108 384.799C100.89 384.973 101.983 385.21 102.922 385.281C103.078 385.295 103.221 385.295 103.377 385.31H103.491C103.605 385.324 103.718 385.324 103.832 385.338H103.989C104.088 385.352 104.202 385.352 104.302 385.352H104.486C104.6 385.366 104.714 385.366 104.828 385.366L167.175 392.161C226.276 398.602 285.901 398.602 345.002 392.161L407.35 385.366C408.558 385.366 409.739 385.31 410.877 385.196C411.246 385.153 411.602 385.111 411.958 385.068C412 385.054 412.057 385.054 412.1 385.039C412.342 385.011 412.583 384.968 412.825 384.926C413.181 384.883 413.536 384.812 413.892 384.741C414.603 384.585 414.926 384.471 415.891 384.139C416.856 383.808 418.458 383.228 419.461 382.745C420.464 382.261 421.159 381.798 421.999 381.272C423.037 380.618 424.024 379.948 425.025 379.198C425.457 378.868 425.753 378.656 426.066 378.358L425.895 378.258L255.968 288.494Z" fill="#FF2B2B"/>
+<path d="M501.789 158.755H501.647L345.784 241.07L432.426 370.058L511.616 167.285V167.001C513.607 161.03 507.492 155.627 501.789 158.755" fill="#7D353B"/>
+<path d="M264.274 119.615C260.292 113.8 251.616 113.8 247.776 119.615L166.211 241.068L255.968 288.495L426.067 378.357C427.135 377.312 427.991 376.293 428.897 375.217C430.177 373.638 431.372 371.947 432.424 370.056L345.782 241.068L264.274 119.615Z" fill="#BD4043"/>
+</svg>
diff --git a/dash_proxy/dash_proxy.py b/dash_proxy/dash_proxy.py
deleted file mode 100644
index 309e977e30e4ae08b01c6f3c3734e673b94fb444..0000000000000000000000000000000000000000
--- a/dash_proxy/dash_proxy.py
+++ /dev/null
@@ -1,16 +0,0 @@
-def setup_dash_proxy():
-    command = [
-        'python',
-        '/dash/app/app.py',
-        '--port',
-        '{port}'
-    ]
-    
-    return {
-        "command": command,
-        "new_browser_tab": False,
-        "launcher_entry": {
-            "enabled": True,
-            'title': 'Dash'
-        }
-    }
diff --git a/dash_proxy/my_app_proxy.py b/dash_proxy/my_app_proxy.py
deleted file mode 100644
index e91a8649001749f630ee85b4e6706f99049fb634..0000000000000000000000000000000000000000
--- a/dash_proxy/my_app_proxy.py
+++ /dev/null
@@ -1,16 +0,0 @@
-def setup_my_app_proxy():
-    command = [
-        'python',
-        '/dash/app/my_app.py',
-        '--port',
-        '{port}'
-    ]
-    
-    return {
-        "command": command,
-        "new_browser_tab": False,
-        "launcher_entry": {
-            "enabled": True,
-            'title': 'MyApp'
-        }
-    }
diff --git a/environment.yml b/environment.yml
deleted file mode 100644
index eef32e175e1f18d058cee69d1e5baf91d7bdca45..0000000000000000000000000000000000000000
--- a/environment.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-name: "base"
-channels:
-  - defaults
-# dependencies:
-# - add packages here
-# - one per line
-prefix: "/opt/conda"
diff --git a/llm_utils/src/llm_utils/client.py b/llm_utils/src/llm_utils/client.py
index 1bca863281a30f6cca7798da46b09adbb125d0a7..9cc1544938df1cba46aad924b5fdf8ab41ac313f 100644
--- a/llm_utils/src/llm_utils/client.py
+++ b/llm_utils/src/llm_utils/client.py
@@ -8,6 +8,7 @@ from enum import Enum
 class OpenAIModels(Enum):
     GPT_3 = "gpt3"
     GPT_4 = "gpt4"
+    GPT_4o = "gpt-4o"
     EMBED = "embed"
 
     @classmethod
@@ -43,7 +44,7 @@ def get_openai_client(
     client = AzureOpenAI(
         api_key=AZURE_OPENAI_API_KEY,
         azure_endpoint=AZURE_OPENAI_ENDPOINT,
-        api_version=OPENAI_API_VERSION, 
+        # api_version=OPENAI_API_VERSION, 
         azure_deployment=model
     )
     return client
diff --git a/my_callbacks.py b/my_callbacks.py
deleted file mode 100644
index 6640c9dab08cbf2a5826a0511b78b6e03a6e215c..0000000000000000000000000000000000000000
--- a/my_callbacks.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from dash.dependencies import (
-    Input, 
-    Output
-)
-from dash import html
-
-
-def register_callbacks(app):
-    @app.callback(
-        Output('output-container-button', 'children'),
-        [Input('submit-btn', 'n_clicks')],
-        [Input('input-text', 'value')]
-    )
-    def update_output(n_clicks, input_value):
-        if n_clicks > 0:
-            return html.Div([
-                html.Label("You entered:"),
-                html.P(input_value)
-            ])
-        else:
-            return ''
diff --git a/my_layout.py b/my_layout.py
deleted file mode 100644
index e0cf7c4a9a74950db48449451e5e594fa8df9140..0000000000000000000000000000000000000000
--- a/my_layout.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from dash import html
-from dash import dcc
-
-
-layout = html.Div([
-    html.H1("Yeay, my app!"),
-    html.Div([
-        html.Label("Enter your text:"),
-        dcc.Input(id='input-text', type='text', value=''),
-        html.Button('Submit', id='submit-btn', n_clicks=0),
-    ]),
-    html.Div(id='output-container-button')
-])
diff --git a/python_startup/.gitkeep b/python_startup/.gitkeep
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/python_startup/01_load_db.py b/python_startup/01_load_db.py
deleted file mode 100644
index e30e171113b983781ffdc0dde64f5936f46c9472..0000000000000000000000000000000000000000
--- a/python_startup/01_load_db.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import os
-from dotenv import load_dotenv
-import sqlalchemy
-import pandas as pd
-from urllib.parse import quote_plus
-
-load_dotenv("/usr/src/app/.env")
-
-class Database:
-    def __init__(self, db_name):
-        self.conn = db_connect(db_name)
-
-    def __getattr__(self, table_name):
-        return pd.read_sql_table(table_name, self.conn)
-
-    def list_tables(self):
-        inspector = sqlalchemy.inspect(self.conn)
-        table_names = inspector.get_table_names()
-        return table_names
-
-def db_connect(db_name):
-    hostname=os.getenv("DB_HOST")
-    user=os.getenv("DB_USER")
-    password=quote_plus(os.getenv("DB_PASSWORD"))
-    conn = sqlalchemy.create_engine(f'postgresql+psycopg2://{user}:{password}@{hostname}/{db_name}')
-    return conn
-
-def get_table(db_name, table_name):
-    conn = db_connect(db_name)
-    dat = pd.read_sql_table(table_name, conn)
-    return dat
-
-def get_all_tables(db_name):
-    db_obj = Database(db_name)
-    return db_obj
-
diff --git a/requirements.txt b/requirements.txt
index 2f01d76914486ad61fecb76519511ad9d28bb01c..e38d7048e8a887c61c87cbfc3763388a8ac1148e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,19 +1,9 @@
-jupyter-server-proxy==4.0.0
-jupyterlab-git==0.42.0
-jupyter_server>=2.0
-
-flake8
-
-dash
-dash-bootstrap-components
-plotly
+jupyter-server-proxy==4.4.0
 
+streamlit
 openai
 rapidfuzz
 nltk
-spacy==3.4.4
-numpy==1.21.6
-scikit-learn==1.1.3
 
 pdfplumber
 python-dotenv
diff --git a/streamlit_app.py b/streamlit_app.py
new file mode 100644
index 0000000000000000000000000000000000000000..4fd2a13de4baade8a912e1455b3ad340483dfa8b
--- /dev/null
+++ b/streamlit_app.py
@@ -0,0 +1,4 @@
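+# placeholder Streamlit app; app/my_app.py imports a module named streamlit_app from HOME_PATH to run it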
+import streamlit as st 
+
+
+st.title("This is my app!")
diff --git a/streamlit_proxy/my_app_proxy.py b/streamlit_proxy/my_app_proxy.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfed48bf980f5a39cd5e4b1c8c07a50daf866f47
--- /dev/null
+++ b/streamlit_proxy/my_app_proxy.py
@@ -0,0 +1,21 @@
+def setup_my_app_proxy():
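+    # command template for jupyter-server-proxy; {port} is substituted with a free port at launch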
+    command = [
+        "/streamlit/app/run_my_app.sh",
+        "--browser.gatherUsageStats", "false",
+        "--browser.serverAddress", "0.0.0.0",
+        "--server.port", "{port}",
+        "--server.headless", "true",
+        "--server.enableCORS", "false",
+        "--server.enableXsrfProtection", "false",
+    ]
+
+    return {
+        "command": command,
+        "timeout": 20,
+        "new_browser_tab": False,
+        "launcher_entry": {
+            "enabled": True,
+            "title": "MyApp",
+            "icon_path": "/streamlit/app/streamlit-favicon.svg"
+        }
+    }
\ No newline at end of file
diff --git a/dash_proxy/setup.py b/streamlit_proxy/setup.py
similarity index 53%
rename from dash_proxy/setup.py
rename to streamlit_proxy/setup.py
index 8e1ebf5761c3de7f783ce204b25474e10f2b0d8d..2e5da0e998bfba8aa04816a786f588280bb4bb33 100644
--- a/dash_proxy/setup.py
+++ b/streamlit_proxy/setup.py
@@ -3,15 +3,14 @@ import setuptools
 setuptools.setup(
     author="Julian Rasch",
     author_email="julian.rasch@fh-muenster.de",
-    description="A small module to run Dash inside a dockerized Jupyterlab.",
-    name="jupyter-dash-proxy",
-    py_modules=["dash_proxy", "my_app_proxy"],
+    description="A small module to run Streamlit inside a dockerized JupyterLab.",
+    name="jupyter-streamlit-proxy",
+    py_modules=["streamlit_proxy", "my_app_proxy"],
     entry_points={
         "jupyter_serverproxy_servers": [
-            # name = packagename:function_name
-            "Dash = dash_proxy:setup_dash_proxy",
+            "Streamlit = streamlit_proxy:setup_streamlit_proxy",
             "MyApp = my_app_proxy:setup_my_app_proxy"
         ]
     },
-    install_requires=["jupyter-server-proxy==4.0.0"],
+    install_requires=["jupyter-server-proxy==4.4.0"],
-)
+)
\ No newline at end of file
diff --git a/streamlit_proxy/streamlit_proxy.py b/streamlit_proxy/streamlit_proxy.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd14608598c4c925f29c51b285631b8ebafac754
--- /dev/null
+++ b/streamlit_proxy/streamlit_proxy.py
@@ -0,0 +1,25 @@
+def setup_streamlit_proxy():
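+    # command template for jupyter-server-proxy; {port} is substituted with a free port at launch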
+    command = [
+        "/streamlit/app/run_streamlit_app.sh",
+        "--browser.gatherUsageStats", "false",
+        "--browser.serverAddress", "0.0.0.0",
+        "--server.port", "{port}",
+        "--server.headless", "true",
+        "--server.enableCORS", "false",
+        "--server.enableXsrfProtection", "false",
+    ]
+
+    return {
+        "command": command,
+        "timeout": 20,
+        "new_browser_tab": False,
+        "launcher_entry": {
+            "enabled": True,
+            "title": "Streamlit App",
+            "icon_path": "/streamlit/app/streamlit-favicon.svg"
+        }
+    }