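"""Settings module: session-state defaults and LLM selection for the Streamlit app."""
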
import streamlit as st
from modules.sqlrag_module import get_tables
from llama_index.llms.openai import OpenAI
from llama_index.llms.ollama import Ollama
from llama_index.llms.anthropic import Anthropic


def read_prompt_file(file_path):
    """Read a prompt template from disk, returning "" on any failure."""
    try:
        with open(file_path, 'r', encoding='utf-8') as file:
            return file.read()
    except FileNotFoundError:
        st.error(f"Prompt file not found: {file_path}")
        return ""
    except UnicodeDecodeError:
        st.error("Error reading the file. Please check the file encoding.")
        return ""


def save_prompt(file_path, content):
    """Write a prompt template to disk and report the result in the UI."""
    try:
        with open(file_path, 'w', encoding='utf-8') as file:
            file.write(content)
        st.success(f"Prompt successfully saved to {file_path}")
    except Exception as e:
        st.error(f"Error saving the prompt: {e}")

# Default prompt templates loaded from disk.
# NOTE: the "DEFUALT" spelling in the first file name is assumed to match the
# prompt file's actual name on disk.
DEFAULT_DIRECT_LLM_PROMPT = read_prompt_file("./prompts/default/DEFUALT_DIRECT_LLM_PROMPT.txt")
DEFAULT_LLM_QUERY_TOOL_DESCRIPTION = read_prompt_file("./prompts/default/DEFAULT_LLM_QUERY_TOOL_DESCRIPTION.txt")
DEFAULT_RAPTOR_QUERY_TOOL_DESCRIPTION = read_prompt_file("./prompts/default/DEFAULT_RAPTOR_QUERY_TOOL_DESCRIPTION.txt")
DEFAULT_SQL_RAG_QUERY_TOOL_DESCRIPTION = read_prompt_file("./prompts/default/DEFAULT_SQL_RAG_QUERY_TOOL_DESCRIPTION.txt")
DEFAULT_WEB_SCRAPER_QUERY_TOOL_DESCRIPTION = read_prompt_file("./prompts/default/DEFAULT_WEB_SCRAPER_QUERY_TOOL_DESCRIPTION.txt")
DEFAULT_SELECTED_MODEL = "GPT"
DEFAULT_SELECTED_GPT = "gpt-4o"
DEFAULT_SELECTED_EMBEDDING_MODEL = "text-embedding-3-small"


def initialize_settings():
    """Populate st.session_state with defaults for the model selection,
    the intent agent, SQL-RAG tables, and the web scraper."""
    if "llm_selection" not in st.session_state:
        st.session_state["llm_selection"] = {}
        st.session_state["llm_selection"]["selected_model"] = DEFAULT_SELECTED_MODEL
        st.session_state["llm_selection"]["selected_gpt"] = DEFAULT_SELECTED_GPT
        st.session_state["llm_selection"]["selected_embedding_model"] = DEFAULT_SELECTED_EMBEDDING_MODEL

    if "intent_agent_settings" not in st.session_state:
        st.session_state["intent_agent_settings"] = {}
        st.session_state["intent_agent_settings"]["direct_llm_prompt"] = DEFAULT_DIRECT_LLM_PROMPT
        st.session_state["intent_agent_settings"]["llm_query_tool_description"] = DEFAULT_LLM_QUERY_TOOL_DESCRIPTION
        st.session_state["intent_agent_settings"]["raptor_query_tool_description"] = DEFAULT_RAPTOR_QUERY_TOOL_DESCRIPTION
        st.session_state["intent_agent_settings"]["sql_rag_query_tool_description"] = DEFAULT_SQL_RAG_QUERY_TOOL_DESCRIPTION
        st.session_state["intent_agent_settings"]["web_scraper_query_tool_description"] = DEFAULT_WEB_SCRAPER_QUERY_TOOL_DESCRIPTION
        st.session_state["intent_agent_settings"]["use_raptor"] = True
        st.session_state["intent_agent_settings"]["use_sql_rag"] = True
        st.session_state["intent_agent_settings"]["use_web_scraper"] = True
        # RAPTOR retriever settings
        st.session_state["intent_agent_settings"]["similarity_top_k"] = 6
        st.session_state["intent_agent_settings"]["retriever_mode"] = "collapsed"

    # SQL-RAG: enable every available table by default
    if "sql_rag_tables" not in st.session_state:
        st.session_state["sql_rag_tables"] = {}
        for table in get_tables():
            st.session_state["sql_rag_tables"][table] = True
        st.session_state["generated_query.text"] = None

    # Web scraping tool
    if "web_scraper_settings" not in st.session_state:
        st.session_state["web_scraper_settings"] = {}
        st.session_state["web_scraper_settings"]["max_number_of_posts"] = 15
        st.session_state["web_scraper_settings"]["selected_web_url"] = "https://www.unipu.hr/novosti"

    if "user_context_included" not in st.session_state:
        st.session_state["user_context_included"] = False


# Ollama model labels shown in the UI mapped to their Ollama tags
# (see https://ollama.com/library).
OLLAMA_MODELS = {
    "mistral:7b": "mistral",
    "gemma:7b": "gemma:7b",
    "llama3:8b": "llama3",
}

# Anthropic model labels mapped to API model names
# (see https://docs.llamaindex.ai/en/latest/examples/llm/anthropic/).
ANTHROPIC_MODELS = {
    "Claude 3 Opus": "claude-3-opus-20240229",
    "Claude 3 Sonnet": "claude-3-sonnet-20240229",
    "Claude 3 Haiku": "claude-3-haiku-20240307",
}


def get_llm():
    """Return an LLM client matching the model selected in session state."""
    if "llm_selection" not in st.session_state:
        initialize_settings()

    selected = st.session_state["llm_selection"]["selected_model"]
    if selected == "GPT":
        return OpenAI(
            model=st.session_state["llm_selection"]["selected_gpt"],
            temperature=0.1,
            api_key=st.session_state["openai_api_key"],
        )
    if selected in OLLAMA_MODELS:
        model = OLLAMA_MODELS[selected]
        try:
            return Ollama(model=model, temperature=0.1)
        except ValueError as e:
            raise ValueError(f"Invalid model '{model}' for Ollama.") from e
    if selected in ANTHROPIC_MODELS:
        model = ANTHROPIC_MODELS[selected]
        try:
            return Anthropic(model=model, temperature=0.1)
        except ValueError as e:
            raise ValueError(f"Invalid model '{model}' for Anthropic.") from e
    raise ValueError(f"Unknown model selection: {selected!r}")