
Commit e188a18

Streamlit app. Update lib versions and API

1 parent 915392a · commit e188a18

File tree: 3 files changed (+19 -6 lines)

app/assets/app-info.md

+1 -1

@@ -1,6 +1,6 @@
 # Redis Azure OpenAI Template
 
-![Azure OpenAI Redis](https://github.com/antonum/azure-openai-redis-deployment/blob/main/app/assets/diagram-small.png?raw=true)
+![Azure OpenAI Redis](https://github.com/redisventures/azure-openai-redis-deployment/blob/main/app/assets/diagram-small.png?raw=true)
 
 Example application allows you to use ChatGPT to analyze the documents, previoslyy unknown to ChatGPT and/or internal to your organization.
 

app/requirements.txt

+3 -2

@@ -1,7 +1,8 @@
 streamlit
 azure.storage.blob
+pypdf
 PyPDF2
-llama_index==0.6.5
+llama_index==0.8.26
 redis
-langchain
+langchain==0.0.262
 openai
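llama_index and langchain are now pinned to exact versions, and pypdf is added alongside PyPDF2. A minimal sketch (not part of this commit) of a startup check that the installed versions match the new pins; the package names are the assumed PyPI distribution names:

# Minimal sketch, not part of this commit: verify installed versions match the new pins.
from importlib.metadata import version

assert version("llama-index") == "0.8.26", "expected llama_index==0.8.26"
assert version("langchain") == "0.0.262", "expected langchain==0.0.262"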

app/streamlit_app.py

+15 -3

@@ -42,6 +42,7 @@ def get_embeddings():
         #using build-in HuggingFace instead
         #from langchain.embeddings import HuggingFaceEmbeddings
         #embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
+
         from langchain.embeddings import OpenAIEmbeddings
         embeddings = OpenAIEmbeddings(deployment=OPENAI_EMBEDDINGS_ENGINE, chunk_size=1 )
     else:
@@ -52,8 +53,19 @@ def get_llm():
 
 def get_llm():
     if OPENAI_API_TYPE=="azure":
+        openai.api_type = "azure"
+        openai.api_base = os.getenv("OPENAI_API_BASE")
+        openai.api_version = os.getenv("OPENAI_API_VERSION")
+        openai.api_key = os.getenv("OPENAI_API_KEY")
+        text_model_deployment = OPENAI_COMPLETIONS_ENGINE
         from langchain.llms import AzureOpenAI
-        llm=AzureOpenAI(deployment_name=OPENAI_COMPLETIONS_ENGINE)
+        llm = AzureOpenAI(deployment_name=text_model_deployment, model_kwargs={
+            "api_key": openai.api_key,
+            "api_base": openai.api_base,
+            "api_type": openai.api_type,
+            "api_version": openai.api_version,
+        })
+        #llm_predictor = LLMPredictor(llm=llm)
     else:
         from langchain.llms import OpenAI
         llm=OpenAI()
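The rewrite above stops relying solely on module-level openai settings and also passes the Azure credentials into the langchain AzureOpenAI wrapper through model_kwargs. A self-contained sketch of the same pattern, assuming langchain==0.0.262; the environment variable names mirror the diff, and OPENAI_COMPLETIONS_ENGINE stands in for your Azure completions deployment name:

# Standalone sketch of the get_llm() pattern above (assumes langchain==0.0.262).
import os
import openai
from langchain.llms import AzureOpenAI

openai.api_type = "azure"
openai.api_base = os.getenv("OPENAI_API_BASE")        # e.g. https://<resource>.openai.azure.com/
openai.api_version = os.getenv("OPENAI_API_VERSION")
openai.api_key = os.getenv("OPENAI_API_KEY")

llm = AzureOpenAI(
    deployment_name=os.getenv("OPENAI_COMPLETIONS_ENGINE"),  # placeholder deployment name
    model_kwargs={
        "api_key": openai.api_key,
        "api_base": openai.api_base,
        "api_type": openai.api_type,
        "api_version": openai.api_version,
    },
)
print(llm("Reply with a single word: hello"))  # simple smoke test against the deployment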
@@ -83,7 +95,7 @@ def get_query_engine():
 
     # load documents
     documents = SimpleDirectoryReader(download_file_path).load_data()
-    print('Document ID:', documents[0].doc_id, 'Document Hash:', documents[0].doc_hash)
+    print('Document ID:', documents[0].doc_id)
 
 
     from llama_index.storage.storage_context import StorageContext
@@ -122,4 +134,4 @@ def get_query_engine():
     except Exception as e:
         response = "Error: %s" % str(e)
     st.markdown(str(response))
-    print(str(response))
+    #print(str(response))
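For reference, the document-loading hunk (@@ -83,7 +95,7 @@) drops the Document Hash from the log line, presumably because llama_index 0.8.x no longer exposes doc_hash the way 0.6.x did. A short sketch of that loading step under the new pin, with "./docs" as a placeholder directory:

# Sketch of the loading step under llama_index==0.8.26; "./docs" is a placeholder path.
from llama_index import SimpleDirectoryReader

documents = SimpleDirectoryReader("./docs").load_data()
for doc in documents:
    print("Document ID:", doc.doc_id)   # doc_hash is intentionally no longer printed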
