Compare revisions

Commits on Source (121)
*env/
.env
*.egg-*
*.pyc
*.txt
config.txt
run.sh
build.sh
run_my_app.sh
run_streamlit_app.sh
variables:
  DOCKER_TLS_CERTDIR: ""
  TIMEOUT: 3600

docker-build-master:
  # Official docker image.
  image: docker:latest
  stage: build
  services:
    - docker:dind
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
  script:
    - cp $ENV_FILE .env
    - docker build --pull -t "$CI_REGISTRY_IMAGE":prodcc4 .
    - docker push "$CI_REGISTRY_IMAGE":prodcc4
FROM quay.io/jupyter/datascience-notebook:hub-5.2.0
USER root
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt && rm requirements.txt
# install some utilities for GPT
COPY llm_utils /llm_utils/
RUN pip install /llm_utils/
ENV CONFIG_PATH=/home/jovyan/config.txt
USER jovyan
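The Dockerfile above points CONFIG_PATH at /home/jovyan/config.txt. A minimal sketch of that file, assuming config.txt is the dotenv file that get_openai_client (further below) loads via load_dotenv; the key names are the three the module reads, and all values are placeholders:

AZURE_OPENAI_API_KEY=<your-azure-openai-key>
AZURE_OPENAI_ENDPOINT=https://<your-resource>.openai.azure.com/
OPENAI_API_VERSION=<your-api-version>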
FROM jupyter/base-notebook:hub-1.5.0
# vennemann
# A JupyterLab for LLMs
import setuptools

setuptools.setup(
    author="Julian Rasch",
    author_email="julian.rasch@fh-muenster.de",
    description="Helper modules to work with LLMs.",
    name="llm_utils",
    package_dir={"": "src"},
    packages=setuptools.find_packages(where="src"),
    install_requires=[
        "openai",
        "python-dotenv"
    ]
)
import os
from enum import Enum

from dotenv import load_dotenv
from openai import AzureOpenAI


class OpenAIModels(Enum):
    """Deployment names accepted by get_openai_client."""
    GPT_3 = "gpt3"
    GPT_4 = "gpt4"
    GPT_4o = "gpt-4o"
    EMBED = "embed"

    @classmethod
    def get_all_values(cls):
        return [member.value for member in cls]


def get_openai_client(
    model: str,
    config_path: str
) -> AzureOpenAI:
    """Build an AzureOpenAI client for <model>, reading credentials from the dotenv file at <config_path>."""
    if model not in OpenAIModels.get_all_values():
        raise ValueError(f"<model> needs to be one of {OpenAIModels.get_all_values()}.")

    load_dotenv(
        dotenv_path=config_path,
        override=True
    )

    AZURE_OPENAI_API_KEY = os.environ.get("AZURE_OPENAI_API_KEY")
    AZURE_OPENAI_ENDPOINT = os.environ.get("AZURE_OPENAI_ENDPOINT")
    OPENAI_API_VERSION = os.environ.get("OPENAI_API_VERSION")

    if any(p is None for p in (AZURE_OPENAI_API_KEY, AZURE_OPENAI_ENDPOINT, OPENAI_API_VERSION)):
        raise ValueError(
            f"""None of the following parameters can be None:
            AZURE_OPENAI_API_KEY: {AZURE_OPENAI_API_KEY},
            AZURE_OPENAI_ENDPOINT: {AZURE_OPENAI_ENDPOINT},
            OPENAI_API_VERSION: {OPENAI_API_VERSION}
            """
        )

    client = AzureOpenAI(
        api_key=AZURE_OPENAI_API_KEY,
        azure_endpoint=AZURE_OPENAI_ENDPOINT,
        api_version=OPENAI_API_VERSION,
        azure_deployment=model
    )
    return client


class ChatGPT:
    """Minimal chat wrapper that keeps the conversation history across turns."""

    def __init__(self, client: AzureOpenAI, model: str):
        self.model = model
        self.client = client
        self.messages = []

    def chat_with_gpt(self, user_input: str):
        # Record the user turn, then ask the deployment for the assistant turn.
        self.messages.append({
            "role": "user",
            "content": user_input
        })
        response = self._generate_response(self.messages)
        return response

    def _generate_response(self, messages):
        response = self.client.chat.completions.create(
            model=self.model,
            messages=messages,
            temperature=0.2,
            max_tokens=150,
            top_p=1.0
        )
        response_message = response.choices[0].message
        # Store the assistant reply so follow-up turns keep the full context.
        self.messages.append({
            "role": response_message.role,
            "content": response_message.content
        })
        return response_message.content
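A short usage sketch of the helpers above. The import path is an assumption (the package is named llm_utils, but the module file that holds these helpers is not shown here), and "gpt-4o" is simply one of the values in OpenAIModels used as a placeholder deployment name:

import os

# Assumed import path: adjust to the actual module name inside the llm_utils package.
from llm_utils.client import get_openai_client, ChatGPT

# CONFIG_PATH is set to /home/jovyan/config.txt in the Dockerfile above.
client = get_openai_client(
    model="gpt-4o",
    config_path=os.environ.get("CONFIG_PATH", "/home/jovyan/config.txt"),
)

chat = ChatGPT(client=client, model="gpt-4o")
print(chat.chat_with_gpt("What can you help me with?"))
# The wrapper keeps self.messages, so follow-up questions retain the earlier context.
print(chat.chat_with_gpt("Can you give a concrete example?"))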
# jupyter-server-proxy==4.4.0
# dash
# dash-bootstrap-components
flake8
openai
rapidfuzz
nltk
plotly
scikit-learn
pdfplumber
python-dotenv