Michael Bücker / jupyterhub-ai · Commit c571becf

Commit c571becf, authored 1 year ago by Julian (parent c8968342)

    implemented simple GPT chat in Dash

Included in 2 merge requests: !6 (Finalized Jupyterlab for the sprint) and !2 (Dev).
Showing 5 changed files with 163 additions and 18 deletions:

    .gitignore         +3   −0
    README.md          +1   −0
    app/callbacks.py   +43  −9
    app/layout.py      +35  −9
    client.py          +81  −0
.gitignore (+3, −0)

*env/
.env
*.pyc
config.txt
README.md (+1, −0)

# A Jupyterlab for LLM

AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, and OPENAI_API_VERSION need to be stored in a config.txt file in the home directory.
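client.py (added in this commit) reads these values with python-dotenv from config.txt. A minimal sketch of the expected file, with placeholder values only; the real key, endpoint, and API version depend on your Azure OpenAI deployment:

AZURE_OPENAI_API_KEY=<your-azure-openai-key>
AZURE_OPENAI_ENDPOINT=https://<your-resource>.openai.azure.com/
OPENAI_API_VERSION=<api-version-supported-by-your-deployment>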
app/callbacks.py (+43, −9)

+from datetime import datetime
 from dash import (
     html,
     Dash
...
...
@@ -8,16 +10,48 @@ from dash.dependencies import (
     State
 )
+from client import ChatGPT
+
+
+def format_chat_messages(chat_history):
+    chat_messages = []
+    for message in chat_history:
+        chat_messages.append(html.Div([
+            html.P(f'{message["sender"]}: {message["message"]}'),
+            html.P(f'Sent at: {message["timestamp"]}')
+        ]))
+    return chat_messages
+
+
 def register_callbacks(app: Dash):
+    chat_gpt = ChatGPT(model="gpt4")
+
     @app.callback(
-        Output("output-container", "children"),
-        [Input("send-button", "n_clicks")],
-        [State("input-text", "value")]
+        [Output('chat-container', 'children'),
+         Output('chat-history', 'data')],
+        [Input('send-button', 'n_clicks')],
+        [State('user-input', 'value'),
+         State('chat-history', 'data')]
     )
-    def generate_response(n_clicks, input_text):
-        if n_clicks > 0:
-            response = "You said: " + input_text
-            return html.Div(response)
-        else:
-            return ""
+    def update_chat(n_clicks, input_value, chat_history):
+        if chat_history is None:
+            chat_history = []
+
+        if n_clicks > 0 and input_value:
+            chat_history.append({
+                'sender': 'User',
+                'message': input_value,
+                'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+            })
+
+            response = chat_gpt.chat_with_gpt(input_value)
+
+            # Add response to chat history
+            chat_history.append({
+                'sender': 'Language Model',
+                'message': response,
+                'timestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+            })
+
+        return format_chat_messages(chat_history), chat_history
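For reference, the records that update_chat appends to the chat-history store, and that format_chat_messages renders into components, are plain dicts. A minimal sketch of the stored data shape; the sample values below are made up for illustration:

# Shape of the data kept in dcc.Store(id='chat-history');
# format_chat_messages turns each record into an html.Div with two html.P children.
sample_history = [
    {'sender': 'User', 'message': 'Hello', 'timestamp': '2024-01-01 12:00:00'},
    {'sender': 'Language Model', 'message': 'Hi, how can I help?', 'timestamp': '2024-01-01 12:00:02'},
]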
app/layout.py (+35, −9)

...
...
@@ -3,12 +3,38 @@ from dash import (
     dcc
 )

-layout = html.Div(
-    className="container",
-    children=[
-        html.H1("GPT Chat", className="mt-5 mb-4"),
-        dcc.Textarea(id="input-text", placeholder="Enter your message:", className="form-control mb-3"),
-        html.Button("Send", id="send-button", n_clicks=0, className="btn btn-primary mb-3"),
-        html.Div(id="output-container")
-    ]
-)
+layout = html.Div([
+    dcc.Store(id='chat-history', data=[]),
+    html.H1(
+        "Simple Chat App",
+        style={'text-align': 'center'}
+    ),
+    html.Div(
+        id='chat-container',
+        style={
+            'overflowY': 'scroll',
+            'height': '70vh',
+            'padding': '10px'
+        }
+    ),
+    html.Div([
+        dcc.Input(
+            id='user-input',
+            type='text',
+            placeholder='Type your message...',
+            debounce=True
+        ),
+        html.Button('Send', id='send-button', n_clicks=0)
+    ], style={
+        'display': 'flex',
+        'alignItems': 'center',
+        'justifyContent': 'center',
+        'position': 'fixed',
+        'bottom': 0,
+        'width': '100%',
+        'padding': '10px'
+    })
+], style={'position': 'relative'})
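The commit itself does not include the application entrypoint that ties this layout to the callbacks. The sketch below is only an illustration of how the two modules could be combined, assuming app/ is importable as a package and the credentials in config.txt are in place; the file name app.py and every line here are assumptions, not part of this commit:

from dash import Dash

# Hypothetical import paths; adjust to how the project actually packages app/.
from app.layout import layout
from app.callbacks import register_callbacks

app = Dash(__name__)
app.layout = layout        # the chat UI defined in app/layout.py
register_callbacks(app)    # wires update_chat to the send-button

if __name__ == "__main__":
    app.run(debug=True)    # app.run_server(debug=True) on older Dash releases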
client.py · new file, mode 100644 (+81, −0)

import os
from enum import Enum

from dotenv import load_dotenv
from openai import AzureOpenAI

# Load the Azure OpenAI settings from config.txt (see README.md).
found_dotenv = load_dotenv("config.txt", override=True)
if not found_dotenv:
    raise ValueError("Could not detect config.txt.")

AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")


class OpenAIModels(Enum):
    GPT_3 = "gpt3"
    GPT_4 = "gpt4"
    EMBED = "embed"

    @classmethod
    def get_all_values(cls):
        return [member.value for member in cls]


def get_openai_client(model: str) -> AzureOpenAI:
    if model not in OpenAIModels.get_all_values():
        raise ValueError(
            f"<model> needs to be one of {OpenAIModels.get_all_values()}."
        )

    if any(p is None for p in (AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, OPENAI_API_VERSION)):
        raise ValueError(
            f"""None of the following parameters can be None:
            AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
            AZURE_OPENAI_ENDPOINT: {AZURE_OPENAI_ENDPOINT},
            OPENAI_API_VERSION: {OPENAI_API_VERSION}"""
        )

    client = AzureOpenAI(
        api_key=AZURE_OPENAI_API_KEY,
        azure_endpoint=AZURE_OPENAI_ENDPOINT,
        api_version=OPENAI_API_VERSION,
        azure_deployment=model,
    )
    return client


class ChatGPT:
    def __init__(self, model="gpt4"):
        self.model = model
        self.client = get_openai_client(model=model)
        self.messages = []

    def chat_with_gpt(self, user_input: str):
        # Record the user message, then generate and return the model's reply.
        self.messages.append({"role": "user", "content": user_input})
        response = self._generate_response(self.messages)
        return response

    def _generate_response(self, messages):
        response = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            temperature=0.2,
            max_tokens=150,
            top_p=1.0,
        )
        response_message = response.choices[0].message
        # Keep the assistant reply in the running conversation history.
        self.messages.append({
            "role": response_message.role,
            "content": response_message.content,
        })
        return response_message.content
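Because client.py loads config.txt and validates the deployment name when a client is created, it can also be exercised on its own, outside Dash. A minimal sketch, assuming config.txt with the three variables is readable from the working directory and that an Azure deployment named "gpt4" exists:

from client import ChatGPT

# Builds the AzureOpenAI client for the "gpt4" deployment and starts
# an empty conversation history.
chat_gpt = ChatGPT(model="gpt4")

# Appends the user message to the history, calls the chat completions
# endpoint, and returns the assistant's reply text.
reply = chat_gpt.chat_with_gpt("What does this JupyterHub project do?")
print(reply)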