From c571becff5baf943b915a8367042f137769bd27a Mon Sep 17 00:00:00 2001
From: Julian <julian@edyoucated.org>
Date: Sun, 10 Mar 2024 14:06:07 +0100
Subject: [PATCH] implemented simple GPT chat in Dash

---
 .gitignore       |  3 ++
 README.md        |  1 +
 app/callbacks.py | 52 +++++++++++++++++++++++++------
 app/layout.py    | 44 ++++++++++++++++++++------
 client.py        | 81 ++++++++++++++++++++++++++++++++++++++++++++++++
 5 files changed, 163 insertions(+), 18 deletions(-)
 create mode 100644 client.py

diff --git a/.gitignore b/.gitignore
index f75c4cd..79b85b9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,5 @@
 *env/
 .env
+
+*.pyc
+config.txt
diff --git a/README.md b/README.md
index 0afc659..2c4caf3 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,3 @@
 # A Jupyterlab for LLM
 
+`AZURE_OPENAI_API_KEY`, `AZURE_OPENAI_ENDPOINT`, and `OPENAI_API_VERSION` need to be stored in a `config.txt` file in the home directory.
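+
+For example, `config.txt` might look like this (placeholder values for your own Azure OpenAI deployment):
+
+```
+AZURE_OPENAI_API_KEY=<your-api-key>
+AZURE_OPENAI_ENDPOINT=https://<your-resource>.openai.azure.com/
+OPENAI_API_VERSION=<api-version>
+```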
diff --git a/app/callbacks.py b/app/callbacks.py
index 9d22226..5b288f2 100644
--- a/app/callbacks.py
+++ b/app/callbacks.py
@@ -1,3 +1,5 @@
+from datetime import datetime
+
 from dash import (
     html, 
     Dash
@@ -8,16 +10,48 @@ from dash.dependencies import (
     State
 )
 
+from client import ChatGPT
+
+
+def format_chat_messages(chat_history):
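+    """Render the stored chat history as a list of Dash Div components."""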
+    chat_messages = []
+    for message in chat_history:
+        chat_messages.append(html.Div([
+            html.P(f'{message["sender"]}: {message["message"]}'),
+            html.P(f'Sent at: {message["timestamp"]}')
+        ]))
+    return chat_messages
+
 
 def register_callbacks(app: Dash):
+    
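+    # A single ChatGPT client (and its internal message history) is shared by every callback invocation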
+    chat_gpt = ChatGPT(model="gpt4")
+    
     @app.callback(
-        Output("output-container", "children"),
-        [Input("send-button", "n_clicks")],
-        [State("input-text", "value")]
+        [Output('chat-container', 'children'),
+         Output('chat-history', 'data')],
+        [Input('send-button', 'n_clicks')],
+        [State('user-input', 'value'),
+         State('chat-history', 'data')]
     )
-    def generate_response(n_clicks, input_text):
-        if n_clicks > 0:
-            response = "You said: " + input_text
-            return html.Div(response)
-        else:
-            return ""
+    def update_chat(n_clicks, input_value, chat_history):
+        if chat_history is None:
+            chat_history = []
+        
+        if n_clicks > 0 and input_value:
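+            # Record the user's message before querying the model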
+            chat_history.append({
+                'sender': 'User',
+                'message': input_value,
+                'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+            })
+            
+            response = chat_gpt.chat_with_gpt(input_value)
+            
+            # Add response to chat history
+            chat_history.append({
+                'sender': 'Language Model',
+                'message': response,
+                'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+            })
+        
+        return format_chat_messages(chat_history), chat_history
diff --git a/app/layout.py b/app/layout.py
index fbc93fa..4d82a24 100644
--- a/app/layout.py
+++ b/app/layout.py
@@ -3,12 +3,38 @@ from dash import (
     dcc
 )
 
-layout = html.Div(
-    className="container",
-    children=[
-        html.H1("GPT Chat", className="mt-5 mb-4"),
-        dcc.Textarea(id="input-text", placeholder="Enter your message:", className="form-control mb-3"),
-        html.Button("Send", id="send-button", n_clicks=0, className="btn btn-primary mb-3"),
-        html.Div(id="output-container")
-    ]
-)
+layout = html.Div([
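+    # Browser-side store that keeps the chat history between callbacks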
+    dcc.Store(
+        id='chat-history', 
+        data=[]
+    ),
+    html.H1(
+        "Simple Chat App", 
+        style={'text-align': 'center'}
+    ),
+    html.Div(
+        id='chat-container', 
+        style={'overflowY': 'scroll', 'height': '70vh', 'padding': '10px'}
+    ),
+    html.Div([
+        dcc.Input(
+            id='user-input', 
+            type='text', 
+            placeholder='Type your message...', 
+            debounce=True
+        ),
+        html.Button(
+            'Send', 
+            id='send-button', 
+            n_clicks=0
+        )
+    ], style={
+        'display': 'flex', 
+        'alignItems': 'center', 
+        'justifyContent': 'center', 
+        'position': 'fixed', 
+        'bottom': 0, 
+        'width': '100%', 
+        'padding': '10px'
+    })
+], style={'position': 'relative'})
diff --git a/client.py b/client.py
new file mode 100644
index 0000000..5b95e07
--- /dev/null
+++ b/client.py
@@ -0,0 +1,81 @@
+import os
+from openai import AzureOpenAI
+from dotenv import load_dotenv
+
+from enum import Enum
+
+
+found_dotenv = load_dotenv(
+    "config.txt",
+    override=True
+)
+
+if not found_dotenv:
+    raise ValueError("Could not find config.txt with the Azure OpenAI settings.")
+
+AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
+AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
+OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")
+
+class OpenAIModels(Enum):
+    GPT_3 = "gpt3"
+    GPT_4 = "gpt4"
+    EMBED = "embed"
+
+    @classmethod
+    def get_all_values(cls):
+        return [member.value for member in cls]
+
+
+def get_openai_client(model: str) -> AzureOpenAI:
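+    """Create an AzureOpenAI client for the given deployment name."""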
+    if model not in OpenAIModels.get_all_values():
+        raise ValueError(f"<model> needs to be one of {OpenAIModels.get_all_values()}.")
+    
+    if any(p is None for p in (AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, OPENAI_API_VERSION)):
+        raise ValueError(
+            f"""None of the following parameters may be None:
+            AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
+            AZURE_OPENAI_ENDPOINT: {AZURE_OPENAI_ENDPOINT},
+            OPENAI_API_VERSION: {OPENAI_API_VERSION}
+            """
+        )
+    
+    client = AzureOpenAI(
+        api_key=AZURE_OPENAI_API_KEY,
+        azure_endpoint=AZURE_OPENAI_ENDPOINT,
+        api_version=OPENAI_API_VERSION, 
+        azure_deployment=model
+    )
+    return client
+
+
+class ChatGPT:
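+    """Minimal chat wrapper that keeps the running conversation in self.messages."""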
+    def __init__(self, model="gpt4"):
+        self.model = model
+        self.client = get_openai_client(model=model)
+        self.messages = []
+
+    def chat_with_gpt(self, user_input: str):
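+        """Add the user message to the conversation and return the model's reply."""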
+        self.messages.append({
+            "role": "user",
+            "content": user_input
+        })
+        response = self._generate_response(self.messages)
+        return response
+
+    def _generate_response(self, messages):
+        response = self.client.chat.completions.create(
+            model=self.model,
+            messages=messages,
+            temperature=0.2,
+            max_tokens=150,
+            top_p=1.0
+        )
+        response_message = response.choices[0].message
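+        # Keep the assistant reply in the conversation so follow-up turns have full context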
+        self.messages.append({
+            "role": response_message.role,
+            "content": response_message.content
+        })
+
+        return response_message.content
+
-- 
GitLab