
Configure OpenAI client for rgvaiclass API keys #27


Open · wants to merge 1 commit into main

4 changes: 3 additions & 1 deletion .gitignore
@@ -100,6 +100,8 @@ celerybeat-schedule
env/
venv/
ENV/
#Jofred's env :^)
llmEnv/
env.bak/
venv.bak/
*.env
@@ -180,4 +182,4 @@ staticfiles/

*.sqlite3

demoapp.py
demoapp.py
16 changes: 9 additions & 7 deletions aistarterkit/settings.py
@@ -184,13 +184,15 @@

LOGIN_URL = "login"

OPENAI_API_TYPE = os.getenv("OPENAI_API_TYPE", "openai")
OPENAI_API_VERSION = os.getenv("OPENAI_API_VERSION", "2024-10-21")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# Azure OpenAI Settings
AZURE_OPENAI_API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_ENDPOINT = os.getenv("AZURE_OPENAI_ENDPOINT")
# OpenAI Settings
OPENAI_API_TYPE = os.getenv('OPENAI_API_TYPE', 'openai') # or 'azure'
OPENAI_API_VERSION = os.getenv('OPENAI_API_VERSION', '2024-02-15')
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
OPENAI_API_BASE = os.getenv('OPENAI_API_BASE', 'https://api.openai.com/v1/')

# Azure OpenAI Settings (if using Azure)
AZURE_OPENAI_API_KEY = os.getenv('AZURE_OPENAI_API_KEY')
AZURE_OPENAI_ENDPOINT = os.getenv('AZURE_OPENAI_ENDPOINT')

AUTH_USER_MODEL = "chat.CustomUser"

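For local setup, a hedged sketch of the environment variables these settings read. Every value below is a placeholder, and pointing OPENAI_API_BASE at the rgvaiclass gateway URL is an assumption about how the class keys are meant to be used; it is not taken from this diff.

import os

# Placeholder values only (not real keys); these can equally live in the shell or a .env file.
os.environ.setdefault("OPENAI_API_TYPE", "openai")  # or "azure"
os.environ.setdefault("OPENAI_API_VERSION", "2024-02-15")
os.environ.setdefault("OPENAI_API_KEY", "sk-placeholder")
os.environ.setdefault("OPENAI_API_BASE", "https://api.openai.com/v1/")  # swap in the rgvaiclass gateway URL
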
2 changes: 1 addition & 1 deletion chat/ai/agent.py
@@ -21,7 +21,7 @@
azure_endpoint=settings.AZURE_OPENAI_ENDPOINT,
)
else:
client = OpenAI()
client = OpenAI(base_url=settings.OPENAI_API_BASE, api_key=settings.OPENAI_API_KEY)


class Agent:
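To confirm the non-Azure branch works against a custom endpoint, a minimal sanity-check sketch (not part of this diff) that builds the client from the same settings and sends a one-token request. The model name "gpt-3.5-turbo" is assumed from chat/views.py, and the snippet is meant to be run from python manage.py shell so Django settings are loaded.

from django.conf import settings
from openai import OpenAI

# Build the client the same way agent.py now does for the non-Azure path.
client = OpenAI(base_url=settings.OPENAI_API_BASE, api_key=settings.OPENAI_API_KEY)

# One-token round trip to verify the key and base URL are accepted.
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "ping"}],
    max_tokens=1,
)
print(response.choices[0].message.content)
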
91 changes: 86 additions & 5 deletions chat/views.py
@@ -21,6 +21,12 @@
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.exceptions import AuthenticationFailed
from django.http import JsonResponse
import json
import logging
from openai import AzureOpenAI, OpenAI

logger = logging.getLogger(__name__)


class CustomLoginView(LoginView):
@@ -233,6 +239,13 @@ def create_thread(request):

# Redirect the user to the new thread's detail page
return redirect("thread_detail", pk=new_thread.pk)
# if request.method == 'POST':
# thread = Thread.objects.create(
# user=request.user,
# name='New Thread' # This will be our default_name
# )
# return JsonResponse({'threadId': thread.id})
# return JsonResponse({'error': 'Invalid request method'}, status=400)


@login_required
@@ -250,12 +263,41 @@ def new_message(request, pk):
thread = get_object_or_404(Thread, pk=pk)
if request.method == "POST":
form = MessageForm(request.POST)
thread_form = ThreadForm(
request.POST, instance=thread
) # Pass the current thread instance
thread_form = ThreadForm(request.POST, instance=thread)
if form.is_valid() and thread_form.is_valid():
message = form.save(commit=False)
thread = thread_form.save() # Save the thread form to update the thread

# Check if this is the first message in the thread
if thread.message_set.count() == 0:
try:
# Configure OpenAI client with proper settings
if settings.OPENAI_API_TYPE == "azure":
# AzureOpenAI accepts the endpoint and API version directly; the deployment
# is selected via the model argument of the create() call below.
client = AzureOpenAI(
api_key=settings.AZURE_OPENAI_API_KEY,
azure_endpoint=settings.AZURE_OPENAI_ENDPOINT,
api_version=settings.OPENAI_API_VERSION,
)
else:
client = OpenAI(api_key=settings.OPENAI_API_KEY, base_url=settings.OPENAI_API_BASE)

response = client.chat.completions.create(
model="gpt-3.5-turbo",
messages=[
{"role": "system", "content": "Generate a brief 27-28 character description of the following conversation starter. Make it concise but meaningful."},
{"role": "user", "content": form.cleaned_data['content']}
],
max_tokens=50,
temperature=0.7
)
new_thread_name = response.choices[0].message.content.strip()
thread.name = new_thread_name
thread.save()
except Exception as e:
logger.error(f"Error generating thread name: {e}")
# Keep default name if generation fails
pass

thread = thread_form.save()
agent = Agent(thread=thread, prompt=thread.prompt)
agent.chat(message.content)
return redirect("thread_detail", pk=thread.pk)
Expand All @@ -264,9 +306,48 @@ def new_message(request, pk):
print(thread_form.errors)
else:
form = MessageForm()
thread_form = ThreadForm(instance=thread) # Pass the current thread instance
thread_form = ThreadForm(instance=thread)
return render(
request,
"chat/new_message.html",
{"form": form, "thread_form": thread_form, "thread": thread},
)
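
The system prompt above asks for a 27-28 character name, but nothing caps the length of the completion before it is saved. A hedged sketch of trimming the generated name before assigning it to the thread; the 28-character cap and the helper name trim_thread_name are assumptions taken from the prompt text, not part of this diff.

def trim_thread_name(raw_name: str, max_len: int = 28) -> str:
    # Collapse internal whitespace, then hard-cap at the intended length.
    name = " ".join(raw_name.split())
    return name[:max_len].rstrip()

# Usage in the view: thread.name = trim_thread_name(new_thread_name)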


# def create_message(request):
# if request.method == 'POST':
# data = json.loads(request.body)
# thread_id = data.get('threadId')
# content = data.get('content')

# thread = get_object_or_404(Thread, id=thread_id, user=request.user)

# # Check if this is the first message in the thread
# if thread.message_set.count() == 0:
# # Generate thread name based on the first prompt
# try:
# response = client.chat.completions.create(
# model="gpt-3.5-turbo",
# messages=[
# {"role": "system", "content": "Generate a brief 27-28 character description of the following conversation starter. Make it concise but meaningful."},
# {"role": "user", "content": content}
# ],
# max_tokens=50,
# temperature=0.7
# )
# new_thread_name = response.choices[0].message.content.strip()
# # Update thread name
# thread.name = new_thread_name
# thread.save()
# except Exception as e:
# logger.error(f"Error generating thread name: {e}")
# # Keep default name if generation fails
# pass

# # Create the message
# message = Message.objects.create(
# thread=thread,
# role='user',
# content=content
# )