This repository has been archived by the owner on Sep 15, 2024. It is now read-only.

Commit

feat: Added a notice for the user to set API keys correctly. Setup experimental Redis vector store -- this may be removed/changed later.
anirbanbasu committed May 13, 2024
1 parent 412a10e commit 7bb4d1a
Showing 2 changed files with 50 additions and 0 deletions.
8 changes: 8 additions & 0 deletions pages/ingest.py
@@ -766,6 +766,14 @@ def Page():
):
solara.Markdown(f"{global_state.status_message.value}")

if (
global_state.global_settings__llm_provider_notice.value
is not constants.EMPTY_STRING
):
solara.Info(
icon=True, label=global_state.global_settings__llm_provider_notice.value
)

with rv.ExpansionPanels(popout=True, hover=True, accordion=True):
with rv.ExpansionPanel():
with rv.ExpansionPanelHeader():
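The added block above renders solara.Info only when the reactive notice string is non-empty. As a rough, self-contained sketch of that pattern (not the repository's component; the llm_provider_notice reactive and the demo button are illustrative assumptions):

import solara

# Illustrative reactive string; an empty value means "no notice to show".
llm_provider_notice = solara.reactive("")


@solara.component
def NoticeDemo():
    # Show the informational banner only when a notice has been set.
    if llm_provider_notice.value != "":
        solara.Info(icon=True, label=llm_provider_notice.value)
    # Demo trigger: set a notice so the banner appears on the next render.
    solara.Button(
        "Simulate provider change",
        on_click=lambda: llm_provider_notice.set(
            "Ensure that you have set the API key correctly from the Settings page."
        ),
    )

Because the component reads llm_provider_notice.value, Solara re-renders it whenever that value changes, so the banner appears or disappears without extra state plumbing.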
42 changes: 42 additions & 0 deletions utils/global_state.py
@@ -40,7 +40,9 @@
from llama_index.storage.index_store.redis import RedisIndexStore
from llama_index.storage.docstore.redis import RedisDocumentStore
from llama_index.storage.kvstore.redis import RedisKVStore
from llama_index.vector_stores.redis import RedisVectorStore
from llama_index.core.graph_stores.simple import SimpleGraphStore
from llama_index.core.vector_stores.simple import SimpleVectorStore
from llama_index.graph_stores.neo4j import Neo4jGraphStore
from llama_index.core import Settings
from llama_index.core.callbacks import CallbackManager
@@ -89,6 +91,9 @@ def show_status_message(message: str, colour: str = "info", timeout: int = 4):
global_settings__language_model_provider: solara.Reactive[str] = solara.reactive(
constants.EMPTY_STRING
)
global_settings__llm_provider_notice: solara.Reactive[str] = solara.reactive(
constants.EMPTY_STRING
)
global_settings__cohere_api_key: solara.Reactive[str] = solara.reactive(
constants.EMPTY_STRING
)
@@ -250,13 +255,15 @@ def update_llm_settings(callback_data: Any = None):
cohere_api_key=global_settings__cohere_api_key.value,
input_type="search_query",
)
global_settings__llm_provider_notice.value = "Cohere is being used as the language model provider. Ensure that you have set the Cohere API key correctly from the Settings page."
case constants.LLM_PROVIDER_OPENAI:
Settings.llm = OpenAI(
model=global_settings__openai_model.value,
temperature=global_settings__llm_temperature.value,
system_prompt=global_settings__llm_system_message.value,
)
Settings.embed_model = OpenAIEmbedding()
global_settings__llm_provider_notice.value = "Open AI is being used as the language model provider. Ensure that you have set the Open AI API key correctly from the Settings page."
case constants.LLM_PROVIDER_OLLAMA:
Settings.llm = Ollama(
model=global_settings__ollama_model.value,
@@ -269,6 +276,7 @@
model_name=global_settings__ollama_model.value,
base_url=global_settings__ollama_url.value,
)
global_settings__llm_provider_notice.value = constants.EMPTY_STRING
Settings.chunk_size = global_settings__llm_chunk_size.value
Settings.chunk_overlap = global_settings__llm_chunk_overlap.value

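Taken together, the additions above set a provider-specific notice in each branch of the match statement and clear it for Ollama, which runs locally and needs no API key. A condensed sketch of that dispatch, using illustrative string literals in place of the repository's constants:

def notice_for_provider(provider: str) -> str:
    """Return the user-facing API-key notice for the selected provider."""
    match provider:
        case "cohere":
            return (
                "Cohere is being used as the language model provider. Ensure that "
                "you have set the Cohere API key correctly from the Settings page."
            )
        case "openai":
            return (
                "OpenAI is being used as the language model provider. Ensure that "
                "you have set the OpenAI API key correctly from the Settings page."
            )
        case _:
            # Ollama and other local providers: no API key, so no notice.
            return ""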
@@ -325,23 +333,57 @@ def update_index_documents_storage_context():
redis_kvstore=kv_store,
namespace=global_settings__redis_namespace.value,
)
vector_store = RedisVectorStore(
redis_url=global_settings__redis_url.value,
overwrite=True,
)
if global_llamaindex_storage_context.value is None:
global_llamaindex_storage_context.value = StorageContext.from_defaults(
docstore=document_store,
index_store=index_store,
vector_store=vector_store,
)
else:
global_llamaindex_storage_context.value.docstore = document_store
global_llamaindex_storage_context.value.index_store = index_store
if (
global_llamaindex_storage_context.value.vector_stores.get(
global_settings__redis_namespace.value
)
is None
):
global_llamaindex_storage_context.value.add_vector_store(
vector_store=vector_store,
namespace=global_settings__redis_namespace.value,
)
else:
global_llamaindex_storage_context.value.vector_stores[
global_settings__redis_namespace.value
] = vector_store
else:
if global_llamaindex_storage_context.value is None:
global_llamaindex_storage_context.value = StorageContext.from_defaults(
docstore=SimpleDocumentStore(),
index_store=SimpleIndexStore(),
vector_store=SimpleVectorStore(),
)
else:
global_llamaindex_storage_context.value.docstore = SimpleDocumentStore()
global_llamaindex_storage_context.value.index_store = SimpleIndexStore()
if (
global_llamaindex_storage_context.value.vector_stores.get(
global_settings__redis_namespace.value
)
is None
):
global_llamaindex_storage_context.value.add_vector_store(
vector_store=SimpleVectorStore(),
namespace=global_settings__redis_namespace.value,
)
else:
global_llamaindex_storage_context.value.vector_stores[
global_settings__redis_namespace.value
] = SimpleVectorStore()


def initialise_default_settings():
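The second half of the hunk attaches the vector store (RedisVectorStore when Redis is configured, SimpleVectorStore otherwise) to the shared StorageContext, either at creation time or by registering it under a namespace afterwards. A hedged sketch of that create-or-register logic, with an illustrative helper and namespace, assuming the same StorageContext API the diff uses:

from typing import Optional

from llama_index.core import StorageContext
from llama_index.core.vector_stores.simple import SimpleVectorStore


def register_vector_store(
    storage_context: Optional[StorageContext],
    vector_store,
    namespace: str,
) -> StorageContext:
    """Attach vector_store to the context, creating the context if needed."""
    if storage_context is None:
        # First call: build a fresh context with the vector store attached.
        return StorageContext.from_defaults(vector_store=vector_store)
    if storage_context.vector_stores.get(namespace) is None:
        # Register the store under its namespace if it is not present yet...
        storage_context.add_vector_store(vector_store=vector_store, namespace=namespace)
    else:
        # ...otherwise replace the existing store for that namespace.
        storage_context.vector_stores[namespace] = vector_store
    return storage_context


# Example: an in-memory store under an illustrative namespace.
context = register_vector_store(None, SimpleVectorStore(), "experimental")

Keying the vector store by namespace lets the Redis-backed and in-memory configurations coexist behind the same reactive StorageContext, which is presumably why the diff overwrites the namespaced entry instead of rebuilding the context.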
