From 6e421380d107f54ec12316992241e282a8ea85ee Mon Sep 17 00:00:00 2001 From: Anirban Basu Date: Wed, 12 Jun 2024 08:34:02 +0900 Subject: [PATCH] chore: Some code comments and cleanup. --- app.py | 24 ++- styles.css | 5 + ui/settings.py | 19 ++- utils/callbacks.py | 5 +- utils/constants.py | 6 - utils/retrievers.py | 17 ++- utils/state_manager.py | 340 ++++++++++++++++++++++++----------------- 7 files changed, 251 insertions(+), 165 deletions(-) diff --git a/app.py b/app.py index 2c9a660..fe55603 100644 --- a/app.py +++ b/app.py @@ -29,7 +29,9 @@ from pathlib import Path +# CWD = Current Working Directory CWD = Path(__file__).parent +# Load the external CSS file to be used as global styles for the page. extern_style = (CWD / "styles.css").read_text(encoding=constants.CHAR_ENCODING_UTF8) page_step: solara.Reactive[int] = solara.reactive(1) @@ -37,12 +39,14 @@ @solara.component def CustomLayout(children: Any = []): + """Define a custom layout for the app.""" sm.set_theme_colours() + # It is necessary to initialise the default settings before the page is rendered. sm.initialise_default_settings() with solara.AppLayout( children=children, - color=None, # sm.corrective_background_colour.value, + color=None, navigation=True, sidebar_open=False, ) as app_layout: @@ -70,12 +74,14 @@ def CustomLayout(children: Any = []): @solara.component def Page(): - # Remove the "This website runs on Solara" message - solara.Style(constants.UI_SOLARA_NOTICE_REMOVE) + """Define the main page.""" + # Apply the external CSS file as global styles for the page. solara.Style(extern_style) + # Numeric labels for the steps step_labels = [1, 2, 3, 4] + # Show all settings in the sidebar only if the user has passed the basic settings step. 
with solara.Sidebar(): if page_step.value in step_labels[2:]: settings_uic.AllSettingsCategorical() @@ -93,7 +99,7 @@ def Page(): case 1: solara.Markdown("Information") case 2: - solara.Markdown("Language model (LLM)") + solara.Markdown("Basic settings") case 3: solara.Markdown("Data") case 4: @@ -102,6 +108,7 @@ def Page(): rv.Divider() with rv.StepperItems(): with rv.StepperContent(step=1): + # Show a welcome message and the EU AI Act transparency notice and get the user to agree to it. with rv.Card(elevation=0): solara.Markdown(constants.MESSAGE_TLDRLC_WELCOME) solara.Markdown( @@ -125,10 +132,11 @@ def Page(): on_click=lambda: page_step.set(2), ) with rv.StepperContent(step=2): + # Show the basic settings to get started. with rv.Card(elevation=0): solara.Markdown( """ - ### Language model settings + ### Basic settings _You can configure other settings of the language model along with indexing and storage from the settings menu, which is available @@ -149,6 +157,7 @@ def Page(): on_click=lambda: page_step.set(3), ) with rv.StepperContent(step=3): + # Show the data ingestion options, ingest data before proceeding to the chat interface. with rv.Card(elevation=0): solara.Markdown( """ @@ -196,7 +205,7 @@ def Page(): ) with rv.CardActions(): solara.Button( - "LLM", + "Settings", icon_name="mdi-cogs", disabled=( ingest_uic.ingest_webpage_data.pending @@ -223,6 +232,7 @@ def Page(): on_click=lambda: page_step.set(4), ) with rv.StepperContent(step=4): + # Show the chat interface once some data has been ingested. with rv.Card(elevation=0): with rv.CardActions(): solara.Button( @@ -232,9 +242,11 @@ def Page(): icon_name="mdi-page-previous", on_click=lambda: page_step.set(3), ) + # TODO: The chat interface needs to be dynamically resized if sidebar is open. chat_uic.ChatInterface() routes = [ + # Define the main route for the app with the custom layout. 
solara.Route(path="/", component=Page, label="TLDRLC", layout=CustomLayout), ] diff --git a/styles.css b/styles.css index d7b9972..4e9f48a 100644 --- a/styles.css +++ b/styles.css @@ -1,3 +1,8 @@ +/* Remove the "This website runs on Solara" message. */ +.v-application--wrap > div:nth-child(2) > div:nth-child(2){ + display: none !important; +} + .solara-autorouter-content { display: flex; flex-direction: column; diff --git a/ui/settings.py b/ui/settings.py index 64c190b..b353062 100644 --- a/ui/settings.py +++ b/ui/settings.py @@ -50,8 +50,11 @@ def update_llm_system_message(callback_data: Any = None): @solara.component -def LLMSettingsBasicComponent(): - """Component for the basic language model settings.""" +def BasicSettingsComponent(): + """Component for the basic settings.""" + + if sm.global_settings__llm_provider_notice.value is not constants.EMPTY_STRING: + solara.Info(icon=True, label=sm.global_settings__llm_provider_notice.value) solara.Select( label="Language model provider", @@ -390,6 +393,8 @@ def test_graphdb_connection(callback_data: Any = None): """, elevation=0, ): + if status.value is not None: + solara.display(status.value) solara.Checkbox( label="Disable Neo4j and use in-memory storage", value=sm.global_settings__neo4j_disable, @@ -422,9 +427,6 @@ def test_graphdb_connection(callback_data: Any = None): on_value=test_graphdb_connection, ) - if status.value is not None: - solara.display(status.value) - @solara.component def DocumentsIndexVectorStorageSettingsComponent(): @@ -469,6 +471,8 @@ def test_redis_connection(callback_data: Any = None): """, elevation=0, ): + if status.value is not None: + solara.display(status.value) solara.Checkbox( label="Disable Redis", value=sm.global_settings__redis_disable, @@ -493,11 +497,6 @@ def test_redis_connection(callback_data: Any = None): on_value=test_redis_connection, ) - if status.value is not None: - solara.display(status.value) - - # update_index_documents_storage_context() - @solara.component def 
GraphVisualisationSettingsComponent(): diff --git a/utils/callbacks.py b/utils/callbacks.py index a818632..89207ea 100644 --- a/utils/callbacks.py +++ b/utils/callbacks.py @@ -20,7 +20,10 @@ class TLDRLCLangfuseCallbackHandler(LlamaIndexCallbackHandler): - """Custom Langfuse callback handler.""" + """Custom Langfuse callback handler, which does nothing more than + calling its superclass methods. + + WARNING: This class could be deprecated in the future.""" def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/utils/constants.py b/utils/constants.py index c88dcb0..0ba902a 100644 --- a/utils/constants.py +++ b/utils/constants.py @@ -198,12 +198,6 @@ UI_STATUS_CONTAINER_HEIGHT = "300" UI_CHAT_CONTAINER_HEIGHT = "400" -UI_SOLARA_NOTICE_REMOVE = """ - .v-application--wrap > div:nth-child(2) > div:nth-child(2){ - display: none !important; - } - """ - MESSAGE_TLDRLC_WELCOME = """ ### Too Long, Didn't Read, Let's Chat (TLDRLC) diff --git a/utils/retrievers.py b/utils/retrievers.py index d1f9068..772d861 100644 --- a/utils/retrievers.py +++ b/utils/retrievers.py @@ -29,7 +29,7 @@ class VectorKnowledgeGraphRetriever(BaseRetriever): - """Custom retriever that performs both semantic search and knowledge graph search.""" + """Custom retriever that retrieves from a semantic search (vector) index and a knowledge graph index.""" def __init__( self, @@ -37,15 +37,18 @@ def __init__( knowledge_graph_retriever: BaseRetriever, mode: str = constants.BOOLEAN_OR, ) -> None: - """Init params.""" + """Initialisation parameters.""" if vector_retriever is None: - raise ValueError("A valid vector index retriever must be specified.") + raise ValueError( + "A valid semantic search (vector) index retriever must be specified." + ) if knowledge_graph_retriever is None: raise ValueError("A valid knowledge graph retriever must be specified.") if mode not in (constants.BOOLEAN_AND, constants.BOOLEAN_OR): raise ValueError( - f"Invalid retriever mode {mode}. 
It must be either {constants.BOOLEAN_AND} or {constants.BOOLEAN_OR}." + f"""Invalid retriever logical combination mode {mode}. + It must be either {constants.BOOLEAN_AND} (intersection) or {constants.BOOLEAN_OR} (union).""" ) self._vector_retriever = vector_retriever self._keyword_retriever = knowledge_graph_retriever @@ -55,22 +58,26 @@ def __init__( def _retrieve(self, query_bundle: QueryBundle) -> List[NodeWithScore]: """Retrieve nodes given query.""" + # Retrieve nodes from both indices vector_nodes = self._vector_retriever.retrieve(query_bundle) knowledge_graph_nodes = self._keyword_retriever.retrieve(query_bundle) vector_ids = {n.node.node_id for n in vector_nodes} knowledge_graph_ids = {n.node.node_id for n in knowledge_graph_nodes} + # Create a combined dictionary of nodes with scores combined_dict = {n.node.node_id: n for n in vector_nodes} combined_dict.update({n.node.node_id: n for n in knowledge_graph_nodes}) + # Perform set operation if self._mode == constants.BOOLEAN_AND: retrieve_ids = vector_ids.intersection(knowledge_graph_ids) elif self._mode == constants.BOOLEAN_OR: retrieve_ids = vector_ids.union(knowledge_graph_ids) else: raise ValueError( - f"Set operation is not defined for invalid retriever mode {self._mode}, which must be either {constants.BOOLEAN_AND} or {constants.BOOLEAN_OR}." + f"""Set operation is not defined for invalid retriever logical combination mode {self._mode}, + which must be either {constants.BOOLEAN_AND} (intersection) or {constants.BOOLEAN_OR} (union).""" ) retrieve_nodes = [combined_dict[rid] for rid in retrieve_ids] diff --git a/utils/state_manager.py b/utils/state_manager.py index 0ca0dde..ad9625c 100644 --- a/utils/state_manager.py +++ b/utils/state_manager.py @@ -81,7 +81,10 @@ def show_status_message(message: str, colour: str = "info", timeout: int = 4): - """Show a status message on the page.""" + """ + Update the reactive variables to be able to display a status message on a page. 
The + message can be displayed in the form of a toast or banner. + """ status_message.value = message status_message_colour.value = colour status_message_show.value = True @@ -204,10 +207,13 @@ def show_status_message(message: str, colour: str = "info", timeout: int = 4): global_llamaindex_chat_store: solara.Reactive[BaseChatStore] = solara.reactive(None) global_llamaindex_chat_memory: solara.Reactive[ChatMemoryBuffer] = solara.reactive(None) + """ Chatbot objects """ class MessageDict(TypedDict): + """A dictionary representing a chat message that is displayed to the user.""" + role: str content: str timestamp: str @@ -227,17 +233,26 @@ class MessageDict(TypedDict): # setting's value to the corresponding environment variable or a default value. This could be simplified # by creating a helper function that encapsulates this pattern. (GitHub Copilot suggestion.) -# def set_global_setting(setting: solara.Reactive, env_key: str, default_value: str, type_cast=str): -# """ -# Sets a global setting's value to the corresponding environment variable or a default value. -# Args: -# setting (solara.Reactive variable): The global setting to set. -# env_key (str): The key of the environment variable. -# default_value (str): The default value to use if the environment variable is not set. -# type_cast (type): The type to cast the environment variable value to. Defaults to str. -# """ -# setting.value = type_cast(os.getenv(env_key, default_value)) +def set_global_setting( + setting: solara.Reactive, env_key: str, default_value: str = None, type_cast=str +): + """ + Sets a global setting's value to the corresponding environment variable or a default value. + + Args: + setting (solara.Reactive variable): The global setting to set. + env_key (str): The key of the environment variable. + default_value (str): The default value to use if the environment variable is not set. + type_cast (type): The type to cast the environment variable value to. Defaults to str. 
+ """
+    if type_cast == bool:
+        setting.value = type_cast(
+            os.getenv(env_key, default_value).lower() in ["true", "yes", "t", "y", "on"]
+        )
+    else:
+        setting.value = type_cast(os.getenv(env_key, default_value))
+
 # Usage # set_global_setting(global_settings__openai_api_key, constants.ENV_KEY_OPENAI_API_KEY, None) @@ -335,10 +350,14 @@ def update_llm_settings(callback_data: Any = None): system_prompt=global_settings__llm_system_message.value, ) Settings.embed_model = CohereEmbedding( + # TODO: Should this be cohere_api_key or api_key? cohere_api_key=global_settings__cohere_api_key.value, input_type="search_query", ) - global_settings__llm_provider_notice.value = "Cohere is being used as the language model provider. Ensure that you have set the Cohere API key correctly from the Settings page." + global_settings__llm_provider_notice.value = """ + Cohere is being used as the language model provider. + Ensure that you have set the Cohere API key correctly from the Settings page. + """ case constants.LLM_PROVIDER_OPENAI: Settings.llm = OpenAI( api_key=global_settings__openai_api_key.value, @@ -349,7 +368,10 @@ def update_llm_settings(callback_data: Any = None): Settings.embed_model = OpenAIEmbedding( api_key=global_settings__openai_api_key.value, ) - global_settings__llm_provider_notice.value = "Open AI is being used as the language model provider. Ensure that you have set the Open AI API key correctly from the Settings page." + global_settings__llm_provider_notice.value = """ + Open AI is being used as the language model provider. + Ensure that you have set the Open AI API key correctly from the Settings page. 
+ """ case constants.LLM_PROVIDER_LLAMAFILE: Settings.llm = Llamafile( base_url=global_settings__llamafile_url.value, @@ -360,6 +382,7 @@ def update_llm_settings(callback_data: Any = None): Settings.embed_model = LlamafileEmbedding( base_url=global_settings__llamafile_url.value, ) + global_settings__llm_provider_notice.value = constants.EMPTY_STRING case constants.LLM_PROVIDER_OLLAMA: Settings.llm = Ollama( model=global_settings__ollama_model.value, @@ -377,6 +400,7 @@ def update_llm_settings(callback_data: Any = None): def update_chatbot_settings(callback_data: Any = None): + """Update the chatbot settings.""" if global_llamaindex_chat_store.value is None: global_llamaindex_chat_store.value = SimpleChatStore() @@ -392,6 +416,7 @@ def update_chatbot_settings(callback_data: Any = None): def update_graph_storage_context(gs: Neo4jPropertyGraphStore = None): + """Update the graph storage context.""" if not global_settings__neo4j_disable.value: if gs is None: gs = Neo4jPropertyGraphStore( @@ -407,6 +432,7 @@ def update_graph_storage_context(gs: Neo4jPropertyGraphStore = None): else: global_llamaindex_storage_context.value.property_graph_store = gs else: + # Note that the SimplePropertyGraphStore does not support all the features of Neo4j. if global_llamaindex_storage_context.value is None: global_llamaindex_storage_context.value = StorageContext.from_defaults( property_graph_store=SimplePropertyGraphStore() @@ -418,6 +444,7 @@ def update_graph_storage_context(gs: Neo4jPropertyGraphStore = None): def update_index_documents_vector_storage_context(): + """Update the document and vector storage context.""" if not global_settings__redis_disable.value: global_cache__ingestion.value = RedisCache( redis_uri=global_settings__redis_url.value, @@ -468,113 +495,136 @@ def update_index_documents_vector_storage_context(): def initialise_default_settings(): """Load the global settings from the environment variables.""" + # Load the settings only once. 
if not global_settings_initialised.value: """ Load the environment variables from the .env file, if present. """ load_dotenv() """ Language model settings """ - global_settings__language_model_provider.value = os.getenv( - constants.ENV_KEY_LLM_PROVIDER, constants.DEFAULT_SETTING_LLM_PROVIDER + set_global_setting( + global_settings__language_model_provider, + constants.ENV_KEY_LLM_PROVIDER, + constants.DEFAULT_SETTING_LLM_PROVIDER, ) - global_settings__cohere_api_key.value = os.getenv( - constants.ENV_KEY_COHERE_API_KEY, None + set_global_setting( + global_settings__cohere_api_key, constants.ENV_KEY_COHERE_API_KEY ) - global_settings__cohere_model.value = os.getenv( - constants.ENV_KEY_COHERE_MODEL, constants.DEFAULT_SETTING_COHERE_MODEL + + set_global_setting( + global_settings__cohere_model, + constants.ENV_KEY_COHERE_MODEL, + constants.DEFAULT_SETTING_COHERE_MODEL, ) - global_settings__openai_model.value = os.getenv( - constants.ENV_KEY_OPENAI_MODEL, constants.DEFAULT_SETTING_OPENAI_MODEL + + set_global_setting( + global_settings__openai_api_key, constants.ENV_KEY_OPENAI_API_KEY ) - global_settings__openai_api_key.value = os.getenv( - constants.ENV_KEY_OPENAI_API_KEY, None + + set_global_setting( + global_settings__openai_model, + constants.ENV_KEY_OPENAI_MODEL, + constants.DEFAULT_SETTING_OPENAI_MODEL, ) - global_settings__llamafile_url.value = os.getenv( - constants.ENV_KEY_LLAMAFILE_URL, constants.DEFAULT_SETTING_LLAMAFILE_URL + + set_global_setting( + global_settings__llamafile_url, + constants.ENV_KEY_LLAMAFILE_URL, + constants.DEFAULT_SETTING_LLAMAFILE_URL, ) - global_settings__ollama_url.value = os.getenv( - constants.ENV_KEY_OLLAMA_URL, constants.DEFAULT_SETTING_OLLAMA_URL + + set_global_setting( + global_settings__ollama_url, + constants.ENV_KEY_OLLAMA_URL, + constants.DEFAULT_SETTING_OLLAMA_URL, ) - global_settings__ollama_model.value = os.getenv( - constants.ENV_KEY_OLLAMA_MODEL, constants.DEFAULT_SETTING_OLLAMA_MODEL + + set_global_setting( + 
global_settings__ollama_model, + constants.ENV_KEY_OLLAMA_MODEL, + constants.DEFAULT_SETTING_OLLAMA_MODEL, ) - global_settings__llm_temperature.value = float( - os.getenv( - constants.ENV_KEY_LLM_TEMPERATURE, - constants.DEFAULT_SETTING_LLM_TEMPERATURE, - ) + + set_global_setting( + global_settings__llm_temperature, + constants.ENV_KEY_LLM_TEMPERATURE, + constants.DEFAULT_SETTING_LLM_TEMPERATURE, + float, ) - global_settings__llm_request_timeout.value = int( - os.getenv( - constants.ENV_KEY_LLM_REQUEST_TIMEOUT, - constants.DEFAULT_SETTING_LLM_REQUEST_TIMEOUT, - ) + + set_global_setting( + global_settings__llm_request_timeout, + constants.ENV_KEY_LLM_REQUEST_TIMEOUT, + constants.DEFAULT_SETTING_LLM_REQUEST_TIMEOUT, + int, ) - global_settings__llm_system_message.value = os.getenv( + + set_global_setting( + global_settings__llm_system_message, constants.ENV_KEY_LLM_SYSTEM_MESSAGE, constants.DEFAULT_SETTING_LLM_SYSTEM_MESSAGE, ) """ Data ingestion settings """ - global_settings__data_ingestion_chunk_size.value = int( - os.getenv( - constants.ENV_KEY_DI_CHUNK_SIZE, - constants.DEFAULT_SETTING_DI_CHUNK_SIZE, - ) - ) - global_settings__data_ingestion_chunk_overlap.value = int( - os.getenv( - constants.ENV_KEY_DI_CHUNK_OVERLAP, - constants.DEFAULT_SETTING_DI_CHUNK_OVERLAP, - ) - ) - global_settings__di_enable_title_extractor.value = bool( - os.getenv( - constants.ENV_KEY_DI_ENABLE_TITLE_EXTRACTOR, - constants.DEFAULT_SETTING_DI_ENABLE_TITLE_EXTRACTOR, - ).lower() - in ["true", "yes", "t", "y", "on"] - ) - global_settings__di_enable_title_extractor_nodes.value = int( - os.getenv( - constants.ENV_KEY_DI_TITLE_EXTRACTOR_NODES, - constants.DEFAULT_SETTING_DI_TITLE_EXTRACTOR_NODES, - ) - ) - global_settings__di_enable_keyword_extractor.value = bool( - os.getenv( - constants.ENV_KEY_DI_ENABLE_KEYWORD_EXTRACTOR, - constants.DEFAULT_SETTING_DI_ENABLE_KEYWORD_EXTRACTOR, - ).lower() - in ["true", "yes", "t", "y", "on"] - ) - 
global_settings__di_enable_keyword_extractor_keywords.value = int( - os.getenv( - constants.ENV_KEY_DI_KEYWORD_EXTRACTOR_KEYWORDS, - constants.DEFAULT_SETTING_DI_KEYWORD_EXTRACTOR_KEYWORDS, - ) - ) - global_settings__di_enable_qa_extractor.value = bool( - os.getenv( - constants.ENV_KEY_DI_ENABLE_QA_EXTRACTOR, - constants.DEFAULT_SETTING_DI_ENABLE_QA_EXTRACTOR, - ).lower() - in ["true", "yes", "t", "y", "on"] - ) - global_settings__di_enable_qa_extractor_questions.value = int( - os.getenv( - constants.ENV_KEY_DI_QA_EXTRACTOR_QUESTIONS, - constants.DEFAULT_SETTING_DI_QA_EXTRACTOR_QUESTIONS, - ) - ) - global_settings__di_enable_summary_extractor.value = bool( - os.getenv( - constants.ENV_KEY_DI_ENABLE_SUMMARY_EXTRACTOR, - constants.DEFAULT_SETTING_DI_ENABLE_SUMMARY_EXTRACTOR, - ).lower() - in ["true", "yes", "t", "y", "on"] - ) + set_global_setting( + global_settings__data_ingestion_chunk_size, + constants.ENV_KEY_DI_CHUNK_SIZE, + constants.DEFAULT_SETTING_DI_CHUNK_SIZE, + int, + ) + + set_global_setting( + global_settings__data_ingestion_chunk_overlap, + constants.ENV_KEY_DI_CHUNK_OVERLAP, + constants.DEFAULT_SETTING_DI_CHUNK_OVERLAP, + int, + ) + + set_global_setting( + global_settings__di_enable_title_extractor, + constants.ENV_KEY_DI_ENABLE_TITLE_EXTRACTOR, + constants.DEFAULT_SETTING_DI_ENABLE_TITLE_EXTRACTOR, + bool, + ) + set_global_setting( + global_settings__di_enable_title_extractor_nodes, + constants.ENV_KEY_DI_TITLE_EXTRACTOR_NODES, + constants.DEFAULT_SETTING_DI_TITLE_EXTRACTOR_NODES, + int, + ) + set_global_setting( + global_settings__di_enable_keyword_extractor, + constants.ENV_KEY_DI_ENABLE_KEYWORD_EXTRACTOR, + constants.DEFAULT_SETTING_DI_ENABLE_KEYWORD_EXTRACTOR, + bool, + ) + set_global_setting( + global_settings__di_enable_keyword_extractor_keywords, + constants.ENV_KEY_DI_KEYWORD_EXTRACTOR_KEYWORDS, + constants.DEFAULT_SETTING_DI_KEYWORD_EXTRACTOR_KEYWORDS, + int, + ) + set_global_setting( + global_settings__di_enable_qa_extractor, + 
constants.ENV_KEY_DI_ENABLE_QA_EXTRACTOR, + constants.DEFAULT_SETTING_DI_ENABLE_QA_EXTRACTOR, + bool, + ) + set_global_setting( + global_settings__di_enable_qa_extractor_questions, + constants.ENV_KEY_DI_QA_EXTRACTOR_QUESTIONS, + constants.DEFAULT_SETTING_DI_QA_EXTRACTOR_QUESTIONS, + int, + ) + set_global_setting( + global_settings__di_enable_summary_extractor, + constants.ENV_KEY_DI_ENABLE_SUMMARY_EXTRACTOR, + constants.DEFAULT_SETTING_DI_ENABLE_SUMMARY_EXTRACTOR, + bool, + ) + + # TODO: Update the set_global_setting method to be able to split text values into lists. global_settings__di_enable_summary_extractor_summaries.value = os.getenv( constants.ENV_KEY_DI_SUMMARY_EXTRACTOR_SUMMARIES, constants.DEFAULT_SETTING_DI_SUMMARY_EXTRACTOR_SUMMARIES, @@ -582,62 +632,75 @@ def initialise_default_settings(): """ Index and chat settings """ - global_settings__index_memory_token_limit.value = int( - os.getenv( - constants.ENV_KEY_INDEX_MEMORY_TOKEN_LIMIT, - constants.DEFAULT_SETTING_INDEX_MEMORY_TOKEN_LIMIT, - ) + set_global_setting( + global_settings__index_memory_token_limit, + constants.ENV_KEY_INDEX_MEMORY_TOKEN_LIMIT, + constants.DEFAULT_SETTING_INDEX_MEMORY_TOKEN_LIMIT, + int, ) - global_settings__index_max_triplets_per_chunk.value = int( - os.getenv( - constants.ENV_KEY_INDEX_MAX_TRIPLETS_PER_CHUNK, - constants.DEFAULT_SETTING_INDEX_MAX_TRIPLETS_PER_CHUNK, - ) + + set_global_setting( + global_settings__index_max_triplets_per_chunk, + constants.ENV_KEY_INDEX_MAX_TRIPLETS_PER_CHUNK, + constants.DEFAULT_SETTING_INDEX_MAX_TRIPLETS_PER_CHUNK, + int, ) - global_settings__index_include_embeddings.value = bool( - os.getenv( - constants.ENV_KEY_INDEX_INCLUDE_EMBEDDINGS, - constants.DEFAULT_SETTING_INDEX_INCLUDE_EMBEDDINGS, - ).lower() - in ["true", "yes", "t", "y", "on"] + set_global_setting( + global_settings__index_include_embeddings, + constants.ENV_KEY_INDEX_INCLUDE_EMBEDDINGS, + constants.DEFAULT_SETTING_INDEX_INCLUDE_EMBEDDINGS, + bool, ) - 
global_settings__index_chat_mode.value = os.getenv( + + set_global_setting( + global_settings__index_chat_mode, constants.ENV_KEY_INDEX_CHAT_MODE, constants.DEFAULT_SETTING_INDEX_CHAT_MODE, ) """ Neo4j settings """ - global_settings__neo4j_disable.value = bool( - os.getenv( - constants.ENV_KEY_NEO4J_DISABLE, constants.DEFAULT_SETTING_NEO4J_DISABLE - ).lower() - in ["true", "yes", "t", "y", "on"] + set_global_setting( + global_settings__neo4j_disable, + constants.ENV_KEY_NEO4J_DISABLE, + constants.DEFAULT_SETTING_NEO4J_DISABLE, + bool, ) - global_settings__neo4j_url.value = os.getenv( - constants.ENV_KEY_NEO4J_URL, constants.DEFAULT_SETTING_NEO4J_URL + set_global_setting( + global_settings__neo4j_url, + constants.ENV_KEY_NEO4J_URL, + constants.DEFAULT_SETTING_NEO4J_URL, ) - global_settings__neo4j_username.value = os.getenv( - constants.ENV_KEY_NEO4J_USERNAME, constants.DEFAULT_SETTING_NEO4J_USERNAME + set_global_setting( + global_settings__neo4j_username, + constants.ENV_KEY_NEO4J_USERNAME, + constants.DEFAULT_SETTING_NEO4J_USERNAME, ) - global_settings__neo4j_password.value = os.getenv( - constants.ENV_KEY_NEO4J_PASSWORD, None + set_global_setting( + global_settings__neo4j_password, + constants.ENV_KEY_NEO4J_PASSWORD, ) - global_settings__neo4j_db_name.value = os.getenv( - constants.ENV_KEY_NEO4J_DB_NAME, constants.DEFAULT_SETTING_NEO4J_DB_NAME + set_global_setting( + global_settings__neo4j_db_name, + constants.ENV_KEY_NEO4J_DB_NAME, + constants.DEFAULT_SETTING_NEO4J_DB_NAME, ) """ Redis settings """ - global_settings__redis_disable.value = bool( - os.getenv( - constants.ENV_KEY_REDIS_DISABLE, constants.DEFAULT_SETTING_REDIS_DISABLE - ).lower() - in ["true", "yes", "t", "y", "on"] + set_global_setting( + global_settings__redis_disable, + constants.ENV_KEY_REDIS_DISABLE, + constants.DEFAULT_SETTING_REDIS_DISABLE, + bool, ) - global_settings__redis_url.value = os.getenv( - constants.ENV_KEY_REDIS_URL, constants.DEFAULT_SETTING_REDIS_URL + set_global_setting( + 
global_settings__redis_url, + constants.ENV_KEY_REDIS_URL, + constants.DEFAULT_SETTING_REDIS_URL, ) - global_settings__redis_namespace.value = os.getenv( - constants.ENV_KEY_REDIS_NAMESPACE, constants.DEFAULT_SETTING_REDIS_NAMESPACE + set_global_setting( + global_settings__redis_namespace, + constants.ENV_KEY_REDIS_NAMESPACE, + constants.DEFAULT_SETTING_REDIS_NAMESPACE, ) setup_langfuse() @@ -648,9 +711,11 @@ def initialise_default_settings(): update_graph_storage_context() update_index_documents_vector_storage_context() + # Set this to true so that the settings are not loaded again. global_settings_initialised.value = True +# TODO: Not being used and will be removed in the future. corrective_background_colour: solara.Reactive[str] = solara.reactive( constants.EMPTY_STRING ) @@ -675,6 +740,7 @@ def set_theme_colours(): solara.lab.theme.themes.dark.info = "#00bcd4" solara.lab.theme.themes.dark.success = "#8bc34a" + # TODO: Not being used and will be removed in the future. corrective_background_colour.value = ( solara.lab.theme.themes.dark.secondary if solara.lab.use_dark_effective()