diff --git a/backend/graphrag_app/api/data.py b/backend/graphrag_app/api/data.py index dae1c6c0..36fdfeea 100644 --- a/backend/graphrag_app/api/data.py +++ b/backend/graphrag_app/api/data.py @@ -50,13 +50,15 @@ response_model=StorageNameList, responses={status.HTTP_200_OK: {"model": StorageNameList}}, ) -async def get_all_data_containers(): +async def get_all_data_containers( + container_store_client=Depends(get_cosmos_container_store_client), +): """ Retrieve a list of all data containers. """ items = [] try: - container_store_client = get_cosmos_container_store_client() + # container_store_client = get_cosmos_container_store_client() for item in container_store_client.read_all_items(): if item["type"] == "data": items.append(item["human_readable_name"]) @@ -184,6 +186,8 @@ async def upload_files( ) return BaseResponse(status="Success.") except Exception as e: + # import traceback + # traceback.print_exc() logger = load_pipeline_logger() logger.error( message="Error uploading files.", diff --git a/backend/graphrag_app/api/graph.py b/backend/graphrag_app/api/graph.py index bd2bc592..9a47ed2a 100644 --- a/backend/graphrag_app/api/graph.py +++ b/backend/graphrag_app/api/graph.py @@ -3,7 +3,9 @@ import os import traceback +from io import BytesIO +import networkx as nx from fastapi import ( APIRouter, Depends, @@ -13,6 +15,7 @@ from fastapi.responses import StreamingResponse from graphrag_app.logger.load_logger import load_pipeline_logger +from graphrag_app.typing.models import GraphDataResponse from graphrag_app.utils.azure_clients import AzureClientManager from graphrag_app.utils.common import ( sanitize_name, @@ -37,6 +40,8 @@ async def get_graphml_file( container_name, sanitized_container_name: str = Depends(sanitize_name) ): + logger = load_pipeline_logger() + # validate graphml file existence azure_client_manager = AzureClientManager() graphml_filename = "graph.graphml" @@ -50,10 +55,12 @@ async def get_graphml_file( return StreamingResponse( blob_stream, 
media_type="application/octet-stream", - headers={"Content-Disposition": f"attachment; filename={graphml_filename}"}, + headers={ + "Content-Disposition": f"attachment; filename={graphml_filename}", + "filename": graphml_filename, + }, ) except Exception as e: - logger = load_pipeline_logger() logger.error( message="Could not fetch graphml file", cause=e, @@ -63,3 +70,36 @@ async def get_graphml_file( status_code=500, detail=f"Could not fetch graphml file for '{container_name}'.", ) + + +@graph_route.get( + "/stats/{index_name}", + summary="Retrieve basic graph statistics, number of nodes and edges", + response_model=GraphDataResponse, + responses={200: {"model": GraphDataResponse}}, + response_description="Retrieve the number of nodes and edges from the index graph", +) +async def retrieve_graph_stats(index_name: str): + logger = load_pipeline_logger() + + # validate index_name and graphml file existence + sanitized_index_name = sanitize_name(index_name) + graphml_filename = "graph.graphml" + graphml_filepath = f"output/{graphml_filename}" # expected file location of the graph based on the workflow + validate_index_file_exist(sanitized_index_name, graphml_filepath) + + try: + azure_client_manager = AzureClientManager() + storage_client = azure_client_manager.get_blob_service_client().get_blob_client( + container=sanitized_index_name, blob=graphml_filepath + ) + blob_data = storage_client.download_blob().readall() + bytes_io = BytesIO(blob_data) + g = nx.read_graphml(bytes_io) + return GraphDataResponse(nodes=len(g.nodes), edges=len(g.edges)) + except Exception: + logger.error("Could not retrieve graph data file") + raise HTTPException( + status_code=500, + detail=f"Could not retrieve graph statistics for index '{index_name}'.", + ) diff --git a/backend/graphrag_app/api/index.py b/backend/graphrag_app/api/index.py index 40941321..46c87274 100644 --- a/backend/graphrag_app/api/index.py +++ b/backend/graphrag_app/api/index.py @@ -14,6 +14,7 @@ UploadFile, status, ) 
+from graphrag.config.enums import IndexingMethod from kubernetes import ( client as kubernetes_client, ) @@ -57,8 +58,12 @@ async def schedule_index_job( index_container_name: str, entity_extraction_prompt: UploadFile | None = None, entity_summarization_prompt: UploadFile | None = None, - community_summarization_prompt: UploadFile | None = None, + community_summarization_graph_prompt: UploadFile | None = None, + community_summarization_text_prompt: UploadFile | None = None, + indexing_method: IndexingMethod = IndexingMethod.Standard.value, ): + indexing_method = IndexingMethod(indexing_method).value + azure_client_manager = AzureClientManager() blob_service_client = azure_client_manager.get_blob_service_client() pipelinejob = PipelineJob() @@ -87,9 +92,14 @@ async def schedule_index_job( if entity_summarization_prompt else None ) - community_summarization_prompt_content = ( - community_summarization_prompt.file.read().decode("utf-8") - if community_summarization_prompt + community_summarization_graph_content = ( + community_summarization_graph_prompt.file.read().decode("utf-8") + if community_summarization_graph_prompt + else None + ) + community_summarization_text_content = ( + community_summarization_text_prompt.file.read().decode("utf-8") + if community_summarization_text_prompt else None ) @@ -120,9 +130,14 @@ async def schedule_index_job( ) = [] existing_job._entity_extraction_prompt = entity_extraction_prompt_content existing_job._entity_summarization_prompt = entity_summarization_prompt_content - existing_job._community_summarization_prompt = ( - community_summarization_prompt_content + existing_job.community_summarization_graph_prompt = ( + community_summarization_graph_content ) + existing_job.community_summarization_text_prompt = ( + community_summarization_text_content + ) + existing_job._indexing_method = indexing_method + existing_job._epoch_request_time = int(time()) existing_job.update_db() else: @@ -132,7 +147,9 @@ async def schedule_index_job( 
human_readable_storage_name=storage_container_name, entity_extraction_prompt=entity_extraction_prompt_content, entity_summarization_prompt=entity_summarization_prompt_content, - community_summarization_prompt=community_summarization_prompt_content, + community_summarization_graph_prompt=community_summarization_graph_content, + community_summarization_text_prompt=community_summarization_text_content, + indexing_method=indexing_method, status=PipelineJobState.SCHEDULED, ) @@ -155,7 +172,7 @@ async def get_all_index_names( try: for item in container_store_client.read_all_items(): if item["type"] == "index": - items.append(item["human_readable_name"]) + items.append(item["human_readable_index_name"]) except Exception as e: logger = load_pipeline_logger() logger.error( @@ -245,9 +262,19 @@ async def delete_index( credential=DefaultAzureCredential(), audience=os.environ["AI_SEARCH_AUDIENCE"], ) - ai_search_index_name = f"{sanitized_container_name}_description_embedding" - if ai_search_index_name in index_client.list_index_names(): - index_client.delete_index(ai_search_index_name) + + index_names = index_client.list_index_names() + ai_search_index_report_name = f"{sanitized_container_name}-community-full_content" + if ai_search_index_report_name in index_names: + index_client.delete_index(ai_search_index_report_name) + + ai_search_index_description_name = f"{sanitized_container_name}-entity-description" + if ai_search_index_description_name in index_names: + index_client.delete_index(ai_search_index_description_name) + + ai_search_index_text_name = f"{sanitized_container_name}-text_unit-text" + if ai_search_index_text_name in index_names: + index_client.delete_index(ai_search_index_text_name) except Exception as e: logger = load_pipeline_logger() diff --git a/backend/graphrag_app/api/prompt_tuning.py b/backend/graphrag_app/api/prompt_tuning.py index 9fb4c5c5..936c4014 100644 --- a/backend/graphrag_app/api/prompt_tuning.py +++ b/backend/graphrag_app/api/prompt_tuning.py @@ 
-6,14 +6,16 @@ from pathlib import Path import graphrag.api as api -import yaml from fastapi import ( APIRouter, Depends, HTTPException, status, ) -from graphrag.config.create_graphrag_config import create_graphrag_config +from graphrag.config.load_config import load_config +from graphrag.config.models.graph_rag_config import GraphRagConfig +from graphrag.logger.rich_progress import RichProgressLogger +from graphrag.prompt_tune.types import DocSelectionType from graphrag_app.logger.load_logger import load_pipeline_logger from graphrag_app.utils.azure_clients import AzureClientManager @@ -32,7 +34,7 @@ ) async def generate_prompts( container_name: str, - limit: int = 5, + limit: int = 15, sanitized_container_name: str = Depends(sanitize_name), ): """ @@ -48,21 +50,31 @@ async def generate_prompts( detail=f"Storage container '{container_name}' does not exist.", ) - # load pipeline configuration file (settings.yaml) for input data and other settings - ROOT_DIR = Path(__file__).resolve().parent.parent.parent - with (ROOT_DIR / "scripts/settings.yaml").open("r") as f: - data = yaml.safe_load(f) - data["input"]["container_name"] = sanitized_container_name - graphrag_config = create_graphrag_config(values=data, root_dir=".") + # load custom pipeline settings + ROOT_DIR = Path(__file__).resolve().parent.parent.parent / "scripts/settings.yaml" + + # layer the custom settings on top of the default configuration settings of graphrag + graphrag_config: GraphRagConfig = load_config( + root_dir=ROOT_DIR.parent, + config_filepath=ROOT_DIR + ) + graphrag_config.input.container_name = sanitized_container_name # generate prompts try: prompts: tuple[str, str, str] = await api.generate_indexing_prompts( config=graphrag_config, + logger=RichProgressLogger(prefix=sanitized_container_name), root=".", limit=limit, - selection_method="random", + selection_method=DocSelectionType.AUTO, ) + prompt_content = { + "entity_extraction_prompt": prompts[0], + "entity_summarization_prompt": 
prompts[1], + "community_summarization_prompt": prompts[2], + } + return prompt_content # returns a fastapi.responses.JSONResponse object except Exception as e: logger = load_pipeline_logger() error_details = { @@ -77,11 +89,4 @@ async def generate_prompts( raise HTTPException( status_code=500, detail=f"Error generating prompts for data in '{container_name}'. Please try a lower limit.", - ) - - prompt_content = { - "entity_extraction_prompt": prompts[0], - "entity_summarization_prompt": prompts[1], - "community_summarization_prompt": prompts[2], - } - return prompt_content # returns a fastapi.responses.JSONResponse object + ) \ No newline at end of file diff --git a/backend/graphrag_app/api/query.py b/backend/graphrag_app/api/query.py index 11d11b22..4cf16a83 100644 --- a/backend/graphrag_app/api/query.py +++ b/backend/graphrag_app/api/query.py @@ -3,30 +3,29 @@ import os import traceback -from pathlib import Path -import yaml from fastapi import ( APIRouter, Depends, HTTPException, status, ) -from graphrag.api.query import global_search, local_search -from graphrag.config.create_graphrag_config import create_graphrag_config +from graphrag.api.query import drift_search as graphrag_drift_search +from graphrag.api.query import global_search as graphrag_global_search +from graphrag.api.query import local_search as graphrag_local_search from graphrag_app.logger.load_logger import load_pipeline_logger from graphrag_app.typing.models import ( + GraphGlobalRequest, GraphRequest, GraphResponse, ) from graphrag_app.typing.pipeline import PipelineJobState -from graphrag_app.utils.azure_clients import AzureClientManager from graphrag_app.utils.common import ( - get_df, + get_data_tables, sanitize_name, subscription_key_check, - validate_index_file_exist, + update_multi_index_context_data, ) from graphrag_app.utils.pipeline import PipelineJob @@ -45,75 +44,56 @@ response_model=GraphResponse, responses={status.HTTP_200_OK: {"model": GraphResponse}}, ) -async def 
global_query(request: GraphRequest): - # this is a slightly modified version of the graphrag.query.cli.run_global_search method - index_name = request.index_name - sanitized_index_name = sanitize_name(index_name) - - if not _is_index_complete(sanitized_index_name): +async def global_search(request: GraphGlobalRequest): + logger = load_pipeline_logger() + + if isinstance(request.index_name, list): + raise HTTPException( + status_code=501, + detail="Multi-index query is not implemented.", + ) + + # make sure all referenced indexes have completed + index_name_map = { + "index_name": request.index_name, + "sanitized_name": sanitize_name(request.index_name), + } + if not _is_index_complete(index_name_map['sanitized_name']): raise HTTPException( status_code=status.HTTP_425_TOO_EARLY, - detail=f"{index_name} not ready for querying.", + detail=f"{index_name_map['index_name']} not ready for querying.", ) - COMMUNITY_REPORT_TABLE = "output/create_final_community_reports.parquet" - COMMUNITIES_TABLE = "output/create_final_communities.parquet" - ENTITIES_TABLE = "output/create_final_entities.parquet" - NODES_TABLE = "output/create_final_nodes.parquet" - - validate_index_file_exist(sanitized_index_name, COMMUNITY_REPORT_TABLE) - validate_index_file_exist(sanitized_index_name, ENTITIES_TABLE) - validate_index_file_exist(sanitized_index_name, NODES_TABLE) - - if isinstance(request.community_level, int): - COMMUNITY_LEVEL = request.community_level - else: - # Current investigations show that community level 1 is the most useful for global search.
Set this as the default value - COMMUNITY_LEVEL = 1 - try: - # read the parquet files into DataFrames and add provenance information - community_report_table_path = ( - f"abfs://{sanitized_index_name}/{COMMUNITY_REPORT_TABLE}" + data_tables = get_data_tables( + index_name_map, + community_level=request.community_level, + include_local_context=False ) - communities_table_path = f"abfs://{sanitized_index_name}/{COMMUNITIES_TABLE}" - entities_table_path = f"abfs://{sanitized_index_name}/{ENTITIES_TABLE}" - nodes_table_path = f"abfs://{sanitized_index_name}/{NODES_TABLE}" - - # load parquet tables associated with the index - nodes_df = get_df(nodes_table_path) - community_reports_df = get_df(community_report_table_path) - communities_df = get_df(communities_table_path) - entities_df = get_df(entities_table_path) - - # load custom pipeline settings - ROOT_DIR = Path(__file__).resolve().parent.parent.parent - with (ROOT_DIR / "scripts/settings.yaml").open("r") as f: - data = yaml.safe_load(f) - - # layer the custom settings on top of the default configuration settings of graphrag - parameters = create_graphrag_config(data, ".") # perform async search - result = await global_search( - config=parameters, - nodes=nodes_df, - entities=entities_df, - communities=communities_df, - community_reports=community_reports_df, - community_level=COMMUNITY_LEVEL, - dynamic_community_selection=False, - response_type="Multiple Paragraphs", + result = await graphrag_global_search( + config=data_tables.config, + communities=data_tables.communities, + entities=data_tables.entities, + community_reports=data_tables.community_reports, + community_level=data_tables.community_level, + dynamic_community_selection=request.dynamic_community_selection, + response_type=request.response_type, query=request.query, ) + context = update_multi_index_context_data( + result[1], + index_name=index_name_map['index_name'], + index_id=index_name_map['sanitized_name'] + ) - return GraphResponse(result=result[0], 
context_data=result[1]) + return GraphResponse(result=result[0], context_data=context) except Exception as e: - logger = load_pipeline_logger() logger.error( message="Could not perform global search.", cause=e, - stack=traceback.format_exc(), + stack=traceback.format_exc() ) raise HTTPException(status_code=500, detail=None) @@ -125,87 +105,122 @@ async def global_query(request: GraphRequest): response_model=GraphResponse, responses={status.HTTP_200_OK: {"model": GraphResponse}}, ) -async def local_query(request: GraphRequest): - index_name = request.index_name - sanitized_index_name = sanitize_name(index_name) - - if not _is_index_complete(sanitized_index_name): +async def local_search(request: GraphRequest): + logger = load_pipeline_logger() + + if isinstance(request.index_name, list): + raise HTTPException( + status_code=501, + detail="Multi-index query is not implemented.", + ) + + # make sure all referenced indexes have completed + index_name_map = { + "index_name": request.index_name, + "sanitized_name": sanitize_name(request.index_name), + } + if not _is_index_complete(index_name_map['sanitized_name']): raise HTTPException( status_code=status.HTTP_425_TOO_EARLY, - detail=f"{index_name} not ready for querying.", + detail=f"{index_name_map['index_name']} not ready for querying.", ) - azure_client_manager = AzureClientManager() - blob_service_client = azure_client_manager.get_blob_service_client() - - COMMUNITY_REPORT_TABLE = "output/create_final_community_reports.parquet" - COVARIATES_TABLE = "output/create_final_covariates.parquet" - ENTITIES_TABLE = "output/create_final_entities.parquet" - NODES_TABLE = "output/create_final_nodes.parquet" - RELATIONSHIPS_TABLE = "output/create_final_relationships.parquet" - TEXT_UNITS_TABLE = "output/create_final_text_units.parquet" - - if isinstance(request.community_level, int): - COMMUNITY_LEVEL = request.community_level - else: - # Current investigations show that community level 2 is the most useful for local search.
Set this as the default value - COMMUNITY_LEVEL = 2 - - # check for existence of files the query relies on to validate the index is complete - validate_index_file_exist(sanitized_index_name, COMMUNITY_REPORT_TABLE) - validate_index_file_exist(sanitized_index_name, ENTITIES_TABLE) - validate_index_file_exist(sanitized_index_name, NODES_TABLE) - validate_index_file_exist(sanitized_index_name, RELATIONSHIPS_TABLE) - validate_index_file_exist(sanitized_index_name, TEXT_UNITS_TABLE) - - community_report_table_path = ( - f"abfs://{sanitized_index_name}/{COMMUNITY_REPORT_TABLE}" - ) - covariates_table_path = f"abfs://{sanitized_index_name}/{COVARIATES_TABLE}" - entities_table_path = f"abfs://{sanitized_index_name}/{ENTITIES_TABLE}" - nodes_table_path = f"abfs://{sanitized_index_name}/{NODES_TABLE}" - relationships_table_path = f"abfs://{sanitized_index_name}/{RELATIONSHIPS_TABLE}" - text_units_table_path = f"abfs://{sanitized_index_name}/{TEXT_UNITS_TABLE}" - - nodes_df = get_df(nodes_table_path) - community_reports_df = get_df(community_report_table_path) - entities_df = get_df(entities_table_path) - relationships_df = get_df(relationships_table_path) - text_units_df = get_df(text_units_table_path) - - # If present, prepare each index's covariates dataframe for merging - index_container_client = blob_service_client.get_container_client( - sanitized_index_name - ) - covariates_df = None - if index_container_client.get_blob_client(COVARIATES_TABLE).exists(): - covariates_df = get_df(covariates_table_path) - - # load custom pipeline settings - ROOT_DIR = Path(__file__).resolve().parent.parent.parent - with (ROOT_DIR / "scripts/settings.yaml").open("r") as f: - data = yaml.safe_load(f) - - # layer the custom settings on top of the default configuration settings of graphrag - parameters = create_graphrag_config(data, ".") - # add index_names to vector_store args - parameters.embeddings.vector_store["collection_name"] = sanitized_index_name - - # perform async search - result 
= await local_search( - config=parameters, - nodes=nodes_df, - entities=entities_df, - community_reports=community_reports_df, - text_units=text_units_df, - relationships=relationships_df, - covariates=covariates_df, - community_level=COMMUNITY_LEVEL, - response_type="Multiple Paragraphs", - query=request.query, - ) - - return GraphResponse(result=result[0], context_data=result[1]) + try: + data_tables = get_data_tables( + index_name_map, + community_level=request.community_level, + include_local_context=True + ) + + # perform async search + result = await graphrag_local_search( + config=data_tables.config, + entities=data_tables.entities, + community_reports=data_tables.community_reports, + communities=data_tables.communities, + text_units=data_tables.text_units, + relationships=data_tables.relationships, + covariates=data_tables.covariates, + community_level=data_tables.community_level, + response_type=request.response_type, + query=request.query, + ) + context = update_multi_index_context_data( + result[1], + index_name=index_name_map['index_name'], + index_id=index_name_map['sanitized_name'] + ) + + return GraphResponse(result=result[0], context_data=context) + except Exception as e: + logger.error( + message="Could not perform local search.", + cause=e, + stack=traceback.format_exc() + ) + raise HTTPException(status_code=500, detail=None) + + +@query_route.post( + "/drift", + summary="Perform a drift (Dynamic Reasoning and Inference with Flexible Traversal) search across the knowledge graph index", + description="DRIFT search offers a new approach to local search queries by incorporating community information, greatly expanding the range of facts retrieved for the final answer. This approach extends the GraphRAG query engine by adding a more comprehensive local search option that leverages community insights to refine queries into detailed follow-up questions. 
While resource-intensive, DRIFT search typically delivers the most accurate responses for queries that demand both a broad understanding of the entire dataset and deeper semantic knowledge about specific details.", + response_model=GraphResponse, + responses={200: {"model": GraphResponse}}, +) +async def drift_search(request: GraphRequest): + logger = load_pipeline_logger() + + if isinstance(request.index_name, list): + raise HTTPException( + status_code=501, + detail="Multi-index query is not implemented.", + ) + + # make sure all referenced indexes have completed + index_name_map = { + "index_name": request.index_name, + "sanitized_name": sanitize_name(request.index_name), + } + if not _is_index_complete(index_name_map['sanitized_name']): + raise HTTPException( + status_code=500, + detail=f"{index_name_map['index_name']} not ready for querying.", + ) + + try: + data_tables = get_data_tables( + index_name_map, + community_level=request.community_level, + include_local_context=True + ) + + # perform async search + result = await graphrag_drift_search( + config=data_tables.config, + entities=data_tables.entities, + community_reports=data_tables.community_reports, + communities=data_tables.communities, + text_units=data_tables.text_units, + relationships=data_tables.relationships, + community_level=data_tables.community_level, + response_type=request.response_type, + query=request.query, + ) + context = update_multi_index_context_data( + result[1], + index_name=index_name_map['index_name'], + index_id=index_name_map['sanitized_name'] + ) + + return GraphResponse(result=result[0], context_data=context) + except Exception as e: + logger.error( + message="Could not perform drift search.", + cause=e, + stack=traceback.format_exc() + ) + raise HTTPException(status_code=500, detail=None) def _is_index_complete(index_name: str) -> bool: @@ -227,4 +242,4 @@ def _is_index_complete(index_name: str) -> bool: pipeline_job = PipelineJob.load_item(index_name) if 
PipelineJobState(pipeline_job.status) == PipelineJobState.COMPLETE: return True - return False + return False \ No newline at end of file diff --git a/backend/graphrag_app/api/query_streaming.py b/backend/graphrag_app/api/query_streaming.py index bef5511b..0e346926 100644 --- a/backend/graphrag_app/api/query_streaming.py +++ b/backend/graphrag_app/api/query_streaming.py @@ -1,13 +1,10 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -import inspect import json import os import traceback -import pandas as pd -import yaml from fastapi import ( APIRouter, Depends, @@ -16,26 +13,30 @@ ) from fastapi.responses import StreamingResponse from graphrag.api.query import ( - global_search_streaming as global_search_streaming_internal, + drift_search_streaming as graphrag_drift_search_streaming, ) from graphrag.api.query import ( - local_search_streaming as local_search_streaming_internal, + global_search_streaming as graphrag_global_search_streaming, +) +from graphrag.api.query import ( + local_search_streaming as graphrag_local_search_streaming, ) -from graphrag.config import create_graphrag_config from graphrag_app.api.query import _is_index_complete from graphrag_app.logger.load_logger import load_pipeline_logger -from graphrag_app.typing.models import GraphRequest -from graphrag_app.utils.azure_clients import AzureClientManager +from graphrag_app.typing.models import ( + GraphDriftRequest, + GraphGlobalRequest, + GraphLocalRequest, + StreamingCallback, +) from graphrag_app.utils.common import ( - get_df, + get_data_tables, sanitize_name, subscription_key_check, - validate_index_file_exist, + update_multi_index_context_data, ) -from .query import _get_embedding_description_store, _update_context - query_streaming_route = APIRouter( prefix="/query/streaming", tags=["Query Streaming Operations"], @@ -50,150 +51,52 @@ description="The global query method generates answers by searching over all AI-generated community reports in a map-reduce fashion. 
This is a resource-intensive method, but often gives good responses for questions that require an understanding of the dataset as a whole.", status_code=status.HTTP_200_OK, ) -async def global_search_streaming(request: GraphRequest): - # this is a slightly modified version of graphrag_app.api.query.global_query() method - if isinstance(request.index_name, str): - index_names = [request.index_name] - else: - index_names = request.index_name - sanitized_index_names = [sanitize_name(name) for name in index_names] - sanitized_index_names_link = { - s: i for s, i in zip(sanitized_index_names, index_names) - } - - for index_name in sanitized_index_names: - if not _is_index_complete(index_name): - raise HTTPException( - status_code=500, - detail=f"{sanitized_index_names_link[index_name]} not ready for querying.", - ) - - COMMUNITY_REPORT_TABLE = "output/create_final_community_reports.parquet" - ENTITIES_TABLE = "output/create_final_entities.parquet" - NODES_TABLE = "output/create_final_nodes.parquet" - - if isinstance(request.community_level, int): - COMMUNITY_LEVEL = request.community_level - else: - # Current investigations show that community level 1 is the most useful for global search. 
Set this as the default value - COMMUNITY_LEVEL = 1 - - for index_name in sanitized_index_names: - validate_index_file_exist(index_name, COMMUNITY_REPORT_TABLE) - validate_index_file_exist(index_name, ENTITIES_TABLE) - validate_index_file_exist(index_name, NODES_TABLE) +async def global_search_streaming(request: GraphGlobalRequest): + logger = load_pipeline_logger() try: - links = { - "nodes": {}, - "community": {}, - "entities": {}, - "text_units": {}, - "relationships": {}, - "covariates": {}, - } - max_vals = { - "nodes": -1, - "community": -1, - "entities": -1, - "text_units": -1, - "relationships": -1, - "covariates": -1, + if isinstance(request.index_name, list): + raise HTTPException( + status_code=501, + detail="Multi-index query is not implemented.", + ) + + # make sure all referenced indexes have completed + index_name_map = { + "index_name": request.index_name, + "sanitized_name": sanitize_name(request.index_name), } - - community_dfs = [] - entities_dfs = [] - nodes_dfs = [] - - for index_name in sanitized_index_names: - community_report_table_path = ( - f"abfs://{index_name}/{COMMUNITY_REPORT_TABLE}" - ) - entities_table_path = f"abfs://{index_name}/{ENTITIES_TABLE}" - nodes_table_path = f"abfs://{index_name}/{NODES_TABLE}" - - # read parquet files into DataFrames and add provenance information - # note that nodes need to set before communities to that max community id makes sense - nodes_df = get_df(nodes_table_path) - for i in nodes_df["human_readable_id"]: - links["nodes"][i + max_vals["nodes"] + 1] = { - "index_name": sanitized_index_names_link[index_name], - "id": i, - } - if max_vals["nodes"] != -1: - nodes_df["human_readable_id"] += max_vals["nodes"] + 1 - nodes_df["community"] = nodes_df["community"].apply( - lambda x: str(int(x) + max_vals["community"] + 1) if x else x - ) - nodes_df["title"] = nodes_df["title"].apply(lambda x: x + f"-{index_name}") - nodes_df["source_id"] = nodes_df["source_id"].apply( - lambda x: ",".join([i + 
f"-{index_name}" for i in x.split(",")]) - ) - max_vals["nodes"] = nodes_df["human_readable_id"].max() - nodes_dfs.append(nodes_df) - - community_df = get_df(community_report_table_path) - for i in community_df["community"].astype(int): - links["community"][i + max_vals["community"] + 1] = { - "index_name": sanitized_index_names_link[index_name], - "id": str(i), - } - if max_vals["community"] != -1: - col = community_df["community"].astype(int) + max_vals["community"] + 1 - community_df["community"] = col.astype(str) - max_vals["community"] = community_df["community"].astype(int).max() - community_dfs.append(community_df) - - entities_df = get_df(entities_table_path) - for i in entities_df["human_readable_id"]: - links["entities"][i + max_vals["entities"] + 1] = { - "index_name": sanitized_index_names_link[index_name], - "id": i, - } - if max_vals["entities"] != -1: - entities_df["human_readable_id"] += max_vals["entities"] + 1 - entities_df["name"] = entities_df["name"].apply( - lambda x: x + f"-{index_name}" - ) - entities_df["text_unit_ids"] = entities_df["text_unit_ids"].apply( - lambda x: [i + f"-{index_name}" for i in x] + if not _is_index_complete(index_name_map['sanitized_name']): + raise HTTPException( + status_code=500, + detail=f"{index_name_map['index_name']} not ready for querying.", ) - max_vals["entities"] = entities_df["human_readable_id"].max() - entities_dfs.append(entities_df) - - # merge the dataframes - nodes_combined = pd.concat(nodes_dfs, axis=0, ignore_index=True, sort=False) - community_combined = pd.concat( - community_dfs, axis=0, ignore_index=True, sort=False - ) - entities_combined = pd.concat( - entities_dfs, axis=0, ignore_index=True, sort=False + + data_tables = get_data_tables( + index_name_map, + community_level=request.community_level, + include_local_context=False ) - - # load custom pipeline settings - this_directory = os.path.dirname( - os.path.abspath(inspect.getfile(inspect.currentframe())) - ) - data = 
yaml.safe_load(open(f"{this_directory}/pipeline-settings.yaml")) - # layer the custom settings on top of the default configuration settings of graphrag - parameters = create_graphrag_config(data, ".") - + query_callback = StreamingCallback() return StreamingResponse( - _wrapper( - global_search_streaming_internal( - config=parameters, - nodes=nodes_combined, - entities=entities_combined, - community_reports=community_combined, - community_level=COMMUNITY_LEVEL, - response_type="Multiple Paragraphs", + _streaming_wrapper( + graphrag_global_search_streaming( + config=data_tables.config, + entities=data_tables.entities, + communities=data_tables.communities, + community_reports=data_tables.community_reports, + community_level=data_tables.community_level, + dynamic_community_selection=request.dynamic_community_selection, + response_type=request.response_type, query=request.query, + callbacks=[query_callback] ), - links, + index_name=index_name_map["index_name"], + index_id=index_name_map["sanitized_name"], + query_callback=query_callback ), media_type="application/json", ) except Exception as e: - logger = load_pipeline_logger() logger.error( message="Error encountered while streaming global search response", cause=e, @@ -208,251 +111,134 @@ async def global_search_streaming(request: GraphRequest): description="The local query method generates answers by combining relevant data from the AI-extracted knowledge-graph with text chunks of the raw documents. This method is suitable for questions that require an understanding of specific entities mentioned in the documents (e.g. 
What are the healing properties of chamomile?).", status_code=status.HTTP_200_OK, ) -async def local_search_streaming(request: GraphRequest): - # this is a slightly modified version of graphrag_app.api.query.local_query() method - if isinstance(request.index_name, str): - index_names = [request.index_name] - else: - index_names = request.index_name - sanitized_index_names = [sanitize_name(name) for name in index_names] - sanitized_index_names_link = { - s: i for s, i in zip(sanitized_index_names, index_names) - } - for index_name in sanitized_index_names: - if not _is_index_complete(index_name): +async def local_search_streaming(request: GraphLocalRequest): + logger = load_pipeline_logger() + try: + if isinstance(request.index_name, list): + raise HTTPException( + status_code=501, + detail="Multi-index query is not implemented.", + ) + + # make sure all referenced indexes have completed + index_name_map = { + "index_name": request.index_name, + "sanitized_name": sanitize_name(request.index_name), + } + if not _is_index_complete(index_name_map['sanitized_name']): raise HTTPException( status_code=500, - detail=f"{sanitized_index_names_link[index_name]} not ready for querying.", + detail=f"{index_name_map['index_name']} not ready for querying.", ) - azure_client_manager = AzureClientManager() - blob_service_client = azure_client_manager.get_blob_service_client() - - community_dfs = [] - covariates_dfs = [] - entities_dfs = [] - nodes_dfs = [] - relationships_dfs = [] - text_units_dfs = [] - links = { - "nodes": {}, - "community": {}, - "entities": {}, - "text_units": {}, - "relationships": {}, - "covariates": {}, - } - max_vals = { - "nodes": -1, - "community": -1, - "entities": -1, - "text_units": -1, - "relationships": -1, - "covariates": -1, - } - - COMMUNITY_REPORT_TABLE = "output/create_final_community_reports.parquet" - COVARIATES_TABLE = "output/create_final_covariates.parquet" - ENTITIES_TABLE = "output/create_final_entities.parquet" - NODES_TABLE = 
"output/create_final_nodes.parquet" - RELATIONSHIPS_TABLE = "output/create_final_relationships.parquet" - TEXT_UNITS_TABLE = "output/create_final_text_units.parquet" + + data_tables = get_data_tables( + index_name_map, + community_level=request.community_level, + include_local_context=True + ) + query_callback = StreamingCallback() + return StreamingResponse( + _streaming_wrapper( + graphrag_local_search_streaming( + config=data_tables.config, + entities=data_tables.entities, + community_reports=data_tables.community_reports, + communities=data_tables.communities, + text_units=data_tables.text_units, + relationships=data_tables.relationships, + covariates=data_tables.covariates, + community_level=data_tables.community_level, + response_type=request.response_type, + query=request.query, + callbacks=[query_callback] + ), + index_name=index_name_map["index_name"], + index_id=index_name_map["sanitized_name"], + query_callback=query_callback + ), + media_type="application/json", + ) + except Exception as e: + logger.error( + message="Error encountered while streaming local search response", + cause=e, + stack=traceback.format_exc(), + ) + raise HTTPException(status_code=500, detail=None) - if isinstance(request.community_level, int): - COMMUNITY_LEVEL = request.community_level - else: - # Current investigations show that community level 2 is the most useful for local search. Set this as the default value - COMMUNITY_LEVEL = 2 +@query_streaming_route.post( + "/drift", + summary="Perform a drift (Dynamic Reasoning and Inference with Flexible Traversal) search across the knowledge graph index", + description="DRIFT search offers a new approach to local search queries by incorporating community information, greatly expanding the range of facts retrieved for the final answer. This approach extends the GraphRAG query engine by adding a more comprehensive local search option that leverages community insights to refine queries into detailed follow-up questions. 
While resource-intensive, DRIFT search typically delivers the most accurate responses for queries that demand both a broad understanding of the entire dataset and deeper semantic knowledge about specific details.", +) +async def drift_search_streaming(request: GraphDriftRequest): + logger = load_pipeline_logger() try: - for index_name in sanitized_index_names: - # check for existence of files the query relies on to validate the index is complete - validate_index_file_exist(index_name, COMMUNITY_REPORT_TABLE) - validate_index_file_exist(index_name, ENTITIES_TABLE) - validate_index_file_exist(index_name, NODES_TABLE) - validate_index_file_exist(index_name, RELATIONSHIPS_TABLE) - validate_index_file_exist(index_name, TEXT_UNITS_TABLE) - - community_report_table_path = ( - f"abfs://{index_name}/{COMMUNITY_REPORT_TABLE}" - ) - covariates_table_path = f"abfs://{index_name}/{COVARIATES_TABLE}" - entities_table_path = f"abfs://{index_name}/{ENTITIES_TABLE}" - nodes_table_path = f"abfs://{index_name}/{NODES_TABLE}" - relationships_table_path = f"abfs://{index_name}/{RELATIONSHIPS_TABLE}" - text_units_table_path = f"abfs://{index_name}/{TEXT_UNITS_TABLE}" - - # read the parquet files into DataFrames and add provenance information - - # note that nodes need to set before communities to that max community id makes sense - nodes_df = get_df(nodes_table_path) - for i in nodes_df["human_readable_id"]: - links["nodes"][i + max_vals["nodes"] + 1] = { - "index_name": sanitized_index_names_link[index_name], - "id": i, - } - if max_vals["nodes"] != -1: - nodes_df["human_readable_id"] += max_vals["nodes"] + 1 - nodes_df["community"] = nodes_df["community"].apply( - lambda x: str(int(x) + max_vals["community"] + 1) if x else x - ) - nodes_df["id"] = nodes_df["id"].apply(lambda x: x + f"-{index_name}") - nodes_df["title"] = nodes_df["title"].apply(lambda x: x + f"-{index_name}") - nodes_df["source_id"] = nodes_df["source_id"].apply( - lambda x: ",".join([i + f"-{index_name}" for i in 
x.split(",")]) - ) - max_vals["nodes"] = nodes_df["human_readable_id"].max() - nodes_dfs.append(nodes_df) - - community_df = get_df(community_report_table_path) - for i in community_df["community"].astype(int): - links["community"][i + max_vals["community"] + 1] = { - "index_name": sanitized_index_names_link[index_name], - "id": str(i), - } - if max_vals["community"] != -1: - col = community_df["community"].astype(int) + max_vals["community"] + 1 - community_df["community"] = col.astype(str) - max_vals["community"] = community_df["community"].astype(int).max() - community_dfs.append(community_df) - - entities_df = get_df(entities_table_path) - for i in entities_df["human_readable_id"]: - links["entities"][i + max_vals["entities"] + 1] = { - "index_name": sanitized_index_names_link[index_name], - "id": i, - } - if max_vals["entities"] != -1: - entities_df["human_readable_id"] += max_vals["entities"] + 1 - entities_df["id"] = entities_df["id"].apply(lambda x: x + f"-{index_name}") - entities_df["name"] = entities_df["name"].apply( - lambda x: x + f"-{index_name}" - ) - entities_df["text_unit_ids"] = entities_df["text_unit_ids"].apply( - lambda x: [i + f"-{index_name}" for i in x] - ) - max_vals["entities"] = entities_df["human_readable_id"].max() - entities_dfs.append(entities_df) - - relationships_df = get_df(relationships_table_path) - for i in relationships_df["human_readable_id"].astype(int): - links["relationships"][i + max_vals["relationships"] + 1] = { - "index_name": sanitized_index_names_link[index_name], - "id": i, - } - if max_vals["relationships"] != -1: - col = ( - relationships_df["human_readable_id"].astype(int) - + max_vals["relationships"] - + 1 + if isinstance(request.index_name, list): + raise HTTPException( + status_code=501, + detail="Multi-index query is not implemented.", ) - relationships_df["human_readable_id"] = col.astype(str) - relationships_df["source"] = relationships_df["source"].apply( - lambda x: x + f"-{index_name}" - ) - 
relationships_df["target"] = relationships_df["target"].apply( - lambda x: x + f"-{index_name}" - ) - relationships_df["text_unit_ids"] = relationships_df["text_unit_ids"].apply( - lambda x: [i + f"-{index_name}" for i in x] - ) - max_vals["relationships"] = ( - relationships_df["human_readable_id"].astype(int).max() - ) - relationships_dfs.append(relationships_df) - - text_units_df = get_df(text_units_table_path) - text_units_df["id"] = text_units_df["id"].apply( - lambda x: f"{x}-{index_name}" - ) - text_units_dfs.append(text_units_df) - - index_container_client = blob_service_client.get_container_client( - index_name + + # make sure all referenced indexes have completed + index_name_map = { + "index_name": request.index_name, + "sanitized_name": sanitize_name(request.index_name), + } + if not _is_index_complete(index_name_map['sanitized_name']): + raise HTTPException( + status_code=500, + detail=f"{index_name_map['index_name']} not ready for querying.", ) - if index_container_client.get_blob_client(COVARIATES_TABLE).exists(): - covariates_df = get_df(covariates_table_path) - if i in covariates_df["human_readable_id"].astype(int): - links["covariates"][i + max_vals["covariates"] + 1] = { - "index_name": sanitized_index_names_link[index_name], - "id": i, - } - if max_vals["covariates"] != -1: - col = ( - covariates_df["human_readable_id"].astype(int) - + max_vals["covariates"] - + 1 - ) - covariates_df["human_readable_id"] = col.astype(str) - max_vals["covariates"] = ( - covariates_df["human_readable_id"].astype(int).max() - ) - covariates_dfs.append(covariates_df) - - nodes_combined = pd.concat(nodes_dfs, axis=0, ignore_index=True) - community_combined = pd.concat(community_dfs, axis=0, ignore_index=True) - entities_combined = pd.concat(entities_dfs, axis=0, ignore_index=True) - text_units_combined = pd.concat(text_units_dfs, axis=0, ignore_index=True) - relationships_combined = pd.concat(relationships_dfs, axis=0, ignore_index=True) - covariates_combined = ( - 
pd.concat(covariates_dfs, axis=0, ignore_index=True) - if covariates_dfs != [] - else None - ) - - # load custom pipeline settings - this_directory = os.path.dirname( - os.path.abspath(inspect.getfile(inspect.currentframe())) - ) - data = yaml.safe_load(open(f"{this_directory}/pipeline-settings.yaml")) - # layer the custom settings on top of the default configuration settings of graphrag - parameters = create_graphrag_config(data, ".") - - # add index_names to vector_store args - parameters.embeddings.vector_store["index_names"] = sanitized_index_names - # internally write over the get_embedding_description_store - # method to use the multi-index collection. - import graphrag.api.query - - graphrag.api.query._get_embedding_description_store = ( - _get_embedding_description_store + + data_tables = get_data_tables( + index_name_map, + community_level=request.community_level, + include_local_context=True ) - - # perform streaming local search + query_callback = StreamingCallback() return StreamingResponse( - _wrapper( - local_search_streaming_internal( - config=parameters, - nodes=nodes_combined, - entities=entities_combined, - community_reports=community_combined, - text_units=text_units_combined, - relationships=relationships_combined, - covariates=covariates_combined, - community_level=COMMUNITY_LEVEL, - response_type="Multiple Paragraphs", + _streaming_wrapper( + graphrag_drift_search_streaming( + config=data_tables.config, + entities=data_tables.entities, + community_reports=data_tables.community_reports, + communities=data_tables.communities, + text_units=data_tables.text_units, + relationships=data_tables.relationships, + community_level=data_tables.community_level, + response_type=request.response_type, query=request.query, + callbacks=[query_callback] ), - links, + index_name=index_name_map["index_name"], + index_id=index_name_map["sanitized_name"], + query_callback=query_callback ), media_type="application/json", ) except Exception as e: - logger = 
load_pipeline_logger() logger.error( - message="Error encountered while streaming local search response", + message="Error encountered while streaming drift search response", cause=e, stack=traceback.format_exc(), ) raise HTTPException(status_code=500, detail=None) -async def _wrapper(x, links): - context = None - async for i in x: - if context: - yield json.dumps({"token": i, "context": None}).encode("utf-8") + b"\n" - else: - context = i - context = _update_context(context, links) - context = json.dumps({"token": "", "context": context}).encode("utf-8") + b"\n" - yield context +async def _streaming_wrapper(token_iter, index_name: str, index_id: str, query_callback: StreamingCallback): + async for token in token_iter: + yield json.dumps( + { + "token": token, + "context": None + } + ).encode("utf-8") + b"\n" + yield json.dumps( + { + "token": "", + "context": update_multi_index_context_data(query_callback.context, index_name, index_id) + } + ).encode("utf-8") + b"\n" + \ No newline at end of file diff --git a/backend/graphrag_app/api/source.py b/backend/graphrag_app/api/source.py index 9692e239..5b4424b3 100644 --- a/backend/graphrag_app/api/source.py +++ b/backend/graphrag_app/api/source.py @@ -35,13 +35,13 @@ if os.getenv("KUBERNETES_SERVICE_HOST"): source_route.dependencies.append(Depends(subscription_key_check)) - -COMMUNITY_REPORT_TABLE = "output/create_final_community_reports.parquet" -COVARIATES_TABLE = "output/create_final_covariates.parquet" -ENTITY_EMBEDDING_TABLE = "output/create_final_entities.parquet" -RELATIONSHIPS_TABLE = "output/create_final_relationships.parquet" -TEXT_UNITS_TABLE = "output/create_final_text_units.parquet" -DOCUMENTS_TABLE = "output/create_final_documents.parquet" +COMMUNITY_TABLE = "output/communities.parquet" +COMMUNITY_REPORT_TABLE = "output/community_reports.parquet" +COVARIATES_TABLE = "output/covariates.parquet" +ENTITIES_TABLE = "output/entities.parquet" +RELATIONSHIPS_TABLE = "output/relationships.parquet" 
+TEXT_UNITS_TABLE = "output/text_units.parquet" +DOCUMENTS_TABLE = "output/documents.parquet" @source_route.get( @@ -96,7 +96,7 @@ async def get_report_info( responses={status.HTTP_200_OK: {"model": TextUnitResponse}}, ) async def get_chunk_info( - text_unit_id: str, + text_unit_id: int, container_name: str, sanitized_container_name: str = Depends(sanitize_name), ): @@ -108,32 +108,51 @@ async def get_chunk_info( f"abfs://{sanitized_container_name}/{TEXT_UNITS_TABLE}", storage_options=pandas_storage_options(), ) + text_units_filter = text_units["human_readable_id"].isin([text_unit_id]) + + # verify that text_unit_id exists in the index + if not text_units_filter.any(): + raise ValueError( + f"Text unit '{text_unit_id}' not found in index '{container_name}'." + ) + + # explode the 'document_ids' column so the format matches with 'document_id' + text_units = text_units[text_units_filter].explode("document_ids") + docs = pd.read_parquet( f"abfs://{sanitized_container_name}/{DOCUMENTS_TABLE}", storage_options=pandas_storage_options(), ) # rename columns for easy joining - docs = docs[["id", "title"]].rename( - columns={"id": "document_id", "title": "source_document"} + docs = docs[ + [ + "id", "title", "human_readable_id" + ] + ].rename( + columns={ + "id": "document_id", + "title": "source_document", + "human_readable_id": "document_human_readable_id" + } ) - # explode the 'document_ids' column so the format matches with 'document_id' - text_units = text_units.explode("document_ids") - - # verify that text_unit_id exists in the index - if not text_units["id"].isin([text_unit_id]).any(): - raise ValueError( - f"Text unit '{text_unit_id}' not found in index '{container_name}'." 
- ) # combine tables to create a (chunk_id -> source_document) mapping merged_table = text_units.merge( docs, left_on="document_ids", right_on="document_id", how="left" ) row = merged_table.loc[ - merged_table["id"] == text_unit_id, ["id", "source_document"] + merged_table["human_readable_id"] == text_unit_id, + [ + "text", + "source_document", + "human_readable_id", + "document_human_readable_id" + ] ] return TextUnitResponse( - text=row["id"].to_numpy()[0], + text_unit_id=row["human_readable_id"].to_numpy()[0], + source_document_id=row["document_human_readable_id"].to_numpy()[0], + text=row["text"].to_numpy()[0], source_document=row["source_document"].to_numpy()[0], ) except Exception as e: @@ -161,10 +180,14 @@ async def get_entity_info( sanitized_container_name: str = Depends(sanitize_name), ): # check for existence of file the query relies on to validate the index is complete - validate_index_file_exist(sanitized_container_name, ENTITY_EMBEDDING_TABLE) + validate_index_file_exist(sanitized_container_name, ENTITIES_TABLE) try: entity_table = pd.read_parquet( - f"abfs://{sanitized_container_name}/{ENTITY_EMBEDDING_TABLE}", + f"abfs://{sanitized_container_name}/{ENTITIES_TABLE}", + storage_options=pandas_storage_options(), + ) + text_units = pd.read_parquet( + f"abfs://{sanitized_container_name}/{TEXT_UNITS_TABLE}", storage_options=pandas_storage_options(), ) # check if entity_id exists in the index @@ -173,10 +196,14 @@ async def get_entity_info( f"Entity '{entity_id}' not found in index '{container_name}'." 
) row = entity_table[entity_table["human_readable_id"] == entity_id] + text_unit_human_readable_ids = text_units[ + text_units["id"].isin(row["text_unit_ids"].to_numpy()[0].tolist()) + ]["human_readable_id"].to_list() return EntityResponse( name=row["title"].to_numpy()[0], + type=row["type"].to_numpy()[0], description=row["description"].to_numpy()[0], - text_units=row["text_unit_ids"].to_numpy()[0].tolist(), + text_units=text_unit_human_readable_ids, ) except Exception as e: logger = load_pipeline_logger() @@ -254,32 +281,38 @@ async def get_relationship_info( ): # check for existence of file the query relies on to validate the index is complete validate_index_file_exist(sanitized_container_name, RELATIONSHIPS_TABLE) - validate_index_file_exist(sanitized_container_name, ENTITY_EMBEDDING_TABLE) + validate_index_file_exist(sanitized_container_name, ENTITIES_TABLE) try: relationship_table = pd.read_parquet( f"abfs://{sanitized_container_name}/{RELATIONSHIPS_TABLE}", storage_options=pandas_storage_options(), ) + relationship_table_row = relationship_table[ + relationship_table.human_readable_id == relationship_id + ] + entity_table = pd.read_parquet( - f"abfs://{sanitized_container_name}/{ENTITY_EMBEDDING_TABLE}", + f"abfs://{sanitized_container_name}/{ENTITIES_TABLE}", storage_options=pandas_storage_options(), ) - row = relationship_table[ - relationship_table.human_readable_id == relationship_id - ] + text_units = pd.read_parquet( + f"abfs://{sanitized_container_name}/{TEXT_UNITS_TABLE}", + storage_options=pandas_storage_options(), + ) + text_unit_ids = text_units[text_units["id"].isin( + relationship_table_row["text_unit_ids"].values[0] + )]["human_readable_id"] return RelationshipResponse( - source=row["source"].values[0], + source=relationship_table_row["source"].values[0], source_id=entity_table[ - entity_table.title == row["source"].values[0] + entity_table.title == relationship_table_row["source"].values[0] ].human_readable_id.values[0], - 
target=row["target"].values[0], + target=relationship_table_row["target"].values[0], target_id=entity_table[ - entity_table.title == row["target"].values[0] + entity_table.title == relationship_table_row["target"].values[0] ].human_readable_id.values[0], - description=row["description"].values[0], - text_units=[ - x[0] for x in row["text_unit_ids"].to_list() - ], # extract text_unit_ids from a list of panda series + description=relationship_table_row["description"].values[0], + text_units=text_unit_ids.to_list(), # extract text_unit_ids from a list of panda series ) except Exception as e: logger = load_pipeline_logger() diff --git a/backend/graphrag_app/logger/__init__.py b/backend/graphrag_app/logger/__init__.py index 5f1cc6db..c2905118 100644 --- a/backend/graphrag_app/logger/__init__.py +++ b/backend/graphrag_app/logger/__init__.py @@ -9,16 +9,16 @@ from graphrag_app.logger.pipeline_job_updater import PipelineJobUpdater from graphrag_app.logger.typing import ( Logger, - PipelineAppInsightsReportingConfig, - PipelineReportingConfigTypes, + PipelineAppInsightsLogger, + # PipelineReportingConfigTypes, ) __all__ = [ "Logger", "ApplicationInsightsWorkflowCallbacks", "ConsoleWorkflowCallbacks", - "PipelineAppInsightsReportingConfig", + "PipelineAppInsightsLogger", "PipelineJobUpdater", - "PipelineReportingConfigTypes", + # "PipelineReportingConfigTypes", "load_pipeline_logger", ] diff --git a/backend/graphrag_app/logger/application_insights_workflow_callbacks.py b/backend/graphrag_app/logger/application_insights_workflow_callbacks.py index 03f7c6b8..18d84f46 100644 --- a/backend/graphrag_app/logger/application_insights_workflow_callbacks.py +++ b/backend/graphrag_app/logger/application_insights_workflow_callbacks.py @@ -136,6 +136,9 @@ def error( details: Optional[dict] = {}, ) -> None: """A call back handler for when an error occurs.""" + if details is None: + details = {} + details = {"cause": str(cause), "stack": stack, **details} self._logger.error( message, diff 
--git a/backend/graphrag_app/logger/load_logger.py b/backend/graphrag_app/logger/load_logger.py index eea0a34c..869bf15a 100644 --- a/backend/graphrag_app/logger/load_logger.py +++ b/backend/graphrag_app/logger/load_logger.py @@ -2,7 +2,7 @@ # Licensed under the MIT License. import os -from pathlib import Path +from pathlib import PurePosixPath from typing import List from graphrag.callbacks.file_workflow_callbacks import FileWorkflowCallbacks @@ -44,7 +44,7 @@ def load_pipeline_logger( log_blob_name = os.path.join(logging_dir, log_blob_name) # ensure the root directory exists; if not, create it blob_service_client = azure_client_manager.get_blob_service_client() - container_root = Path(log_blob_name).parts[0] + container_root = PurePosixPath(log_blob_name).parts[0] if not blob_service_client.get_container_client( container_root ).exists(): diff --git a/backend/graphrag_app/logger/typing.py b/backend/graphrag_app/logger/typing.py index 533e535d..e970a512 100644 --- a/backend/graphrag_app/logger/typing.py +++ b/backend/graphrag_app/logger/typing.py @@ -5,10 +5,7 @@ from enum import Enum from typing import Literal -from graphrag.index.config.reporting import ( - PipelineReportingConfig, - PipelineReportingConfigTypes, -) +from graphrag.logger.base import StatusLogger from pydantic import Field as pydantic_Field @@ -19,9 +16,7 @@ class Logger(Enum): APP_INSIGHTS = (4, "app_insights") -class PipelineAppInsightsReportingConfig( - PipelineReportingConfig[Literal["app_insights"]] -): +class PipelineAppInsightsLogger(StatusLogger): """Represents the ApplicationInsights reporting configuration for the pipeline.""" type: Literal["app_insights"] = Logger.APP_INSIGHTS.name.lower() @@ -45,6 +40,6 @@ class PipelineAppInsightsReportingConfig( # add the new type to the existing PipelineReportingConfigTypes -PipelineReportingConfigTypes = ( - PipelineReportingConfigTypes | PipelineAppInsightsReportingConfig -) +# StatusLogger = ( +# StatusLogger | PipelineAppInsightsReportingConfig 
+# ) diff --git a/backend/graphrag_app/typing/models.py b/backend/graphrag_app/typing/models.py index 229356a2..f8fb3a35 100644 --- a/backend/graphrag_app/typing/models.py +++ b/backend/graphrag_app/typing/models.py @@ -1,11 +1,15 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. +from io import StringIO from typing import ( Any, List, + Optional, ) +import pandas as pd +from graphrag.callbacks.query_callbacks import QueryCallbacks from pydantic import BaseModel @@ -26,14 +30,32 @@ class ClaimResponse(BaseModel): class EntityResponse(BaseModel): name: str + type: str description: str - text_units: list[str] + text_units: list[int] -class GraphRequest(BaseModel): +class IndexingConfigs(BaseModel): + index_name: str + + +class GraphRequest(IndexingConfigs): index_name: str query: str community_level: int | None = None + response_type: str = "Multiple Paragraphs" + + +class GraphGlobalRequest(GraphRequest): + dynamic_community_selection: bool = False + + +class GraphLocalRequest(GraphRequest): + conversation_history_max_turns: int = 5 + + +class GraphDriftRequest(GraphRequest): + conversation_history_max_turns: int = 5 class GraphResponse(BaseModel): @@ -69,7 +91,36 @@ class RelationshipResponse(BaseModel): target: str target_id: int description: str - text_units: list[str] + text_units: list[int] + + +class QueryData(BaseModel): + class Config: + arbitrary_types_allowed = True + + communities: pd.DataFrame + community_reports: pd.DataFrame + entities: pd.DataFrame + text_units: Optional[pd.DataFrame] = None + relationships: Optional[pd.DataFrame] = None + covariates: Optional[pd.DataFrame] = None + community_level: Optional[int] = 1 + config: Optional[Any] = None + + +class StreamingCallback(QueryCallbacks): + context: Optional[Any] = None + response: Optional[StringIO] = StringIO() + + def on_context(self, context) -> None: + """Handle when context data is constructed.""" + super().on_context(context) + self.context = context + + def 
on_llm_new_token(self, token) -> None: + """Handle when a new token is generated.""" + super().on_llm_new_token(token) + self.response.write(token) class StorageNameList(BaseModel): @@ -77,5 +128,7 @@ class StorageNameList(BaseModel): class TextUnitResponse(BaseModel): + text_unit_id: int text: str source_document: str + source_document_id: int diff --git a/backend/graphrag_app/utils/azure_clients.py b/backend/graphrag_app/utils/azure_clients.py index 03bc84b3..d6d92b7f 100644 --- a/backend/graphrag_app/utils/azure_clients.py +++ b/backend/graphrag_app/utils/azure_clients.py @@ -2,6 +2,7 @@ # Licensed under the MIT License. import os +from pathlib import PurePosixPath from azure.cosmos import ( ContainerProxy, @@ -115,6 +116,9 @@ def __init__(self) -> None: _BlobServiceClientSingletonAsync.get_instance() ) + # parse account hostname from the azure storage connection string or blob url + self.storage_account_hostname = PurePosixPath(self.storage_blob_url).parts[1] + # parse account name from the azure storage connection string or blob url if self.storage_connection_string: meta_info = {} @@ -127,12 +131,7 @@ def __init__(self) -> None: meta_info[m[0]] = m[1] self.storage_account_name = meta_info["AccountName"] else: - self.storage_account_name = self.storage_blob_url.split("//")[1].split(".")[ - 0 - ] - - # parse account hostname from the azure storage connection string or blob url - self.storage_account_hostname = self._blob_service_client.url.split("//")[1] + self.storage_account_name = self.storage_account_hostname.split(".")[0] def get_blob_service_client(self) -> BlobServiceClient: """ diff --git a/backend/graphrag_app/utils/common.py b/backend/graphrag_app/utils/common.py index 6d002b3b..23c56db4 100644 --- a/backend/graphrag_app/utils/common.py +++ b/backend/graphrag_app/utils/common.py @@ -7,9 +7,11 @@ import csv import hashlib import os +import sys import traceback from io import StringIO -from typing import Annotated, Tuple +from pathlib import Path +from 
typing import Annotated, Dict, Tuple import pandas as pd from azure.core.exceptions import ResourceNotFoundError @@ -17,8 +19,11 @@ from azure.identity import DefaultAzureCredential from azure.storage.blob.aio import ContainerClient from fastapi import Header, HTTPException, status +from graphrag.config.load_config import load_config +from graphrag.config.models.graph_rag_config import GraphRagConfig from graphrag_app.logger.load_logger import load_pipeline_logger +from graphrag_app.typing.models import QueryData from graphrag_app.utils.azure_clients import AzureClientManager FILE_UPLOAD_CACHE = "cache/uploaded_files.csv" @@ -197,7 +202,7 @@ def desanitize_name(sanitized_container_name: str) -> str | None: try: return container_store_client.read_item( sanitized_container_name, sanitized_container_name - )["human_readable_name"] + )["human_readable_index_name"] except exceptions.CosmosResourceNotFoundError: return None except Exception: @@ -223,6 +228,170 @@ async def subscription_key_check( return Ocp_Apim_Subscription_Key +def get_data_tables( + index_names: Dict[str, str], + community_level: int = -1, + include_local_context: bool = True + ) -> QueryData: + """ + Get the data tables for the given index names. + + Args: + index_names (str | List[str]): The index names. + + Returns: + QueryData: The data objects for the given index names. 
+ """ + logger = load_pipeline_logger() + + COMMUNITY_TABLE = "output/communities.parquet" + COMMUNITY_REPORT_TABLE = "output/community_reports.parquet" + COVARIATES_TABLE = "output/covariates.parquet" + ENTITIES_TABLE = "output/entities.parquet" + RELATIONSHIPS_TABLE = "output/relationships.parquet" + TEXT_UNITS_TABLE = "output/text_units.parquet" + + if isinstance(community_level, int): + COMMUNITY_LEVEL = community_level + elif isinstance(community_level, float): + COMMUNITY_LEVEL = int(community_level) + else: + # community level 1 is best for local and drift search, level 2 is best got global search + COMMUNITY_LEVEL = 1 if include_local_context else 2 + + if COMMUNITY_LEVEL == -1: + # get all available communities when the community level is set to -1 + COMMUNITY_LEVEL = sys.maxsize # get the largest possible integer in python + + sanitized_name = index_names["sanitized_name"] + + # check for existence of files the query relies on to validate the index is complete + validate_index_file_exist(sanitized_name, COMMUNITY_TABLE) + validate_index_file_exist(sanitized_name, COMMUNITY_REPORT_TABLE) + validate_index_file_exist(sanitized_name, ENTITIES_TABLE) + validate_index_file_exist(sanitized_name, RELATIONSHIPS_TABLE) + validate_index_file_exist(sanitized_name, TEXT_UNITS_TABLE) + + # load community reports data + communities_df = get_df(f"abfs://{sanitized_name}/{COMMUNITY_TABLE}") + communities_df[communities_df.level <= COMMUNITY_LEVEL] + + # load community reports data + community_report_df = get_df(f"abfs://{sanitized_name}/{COMMUNITY_REPORT_TABLE}") + community_report_df[community_report_df.level <= COMMUNITY_LEVEL] + + entities_df = get_df(f"abfs://{sanitized_name}/{ENTITIES_TABLE}") + + if include_local_context: + # we only need to get these tables when we are not doing a global query + text_units_df = get_df(f"abfs://{sanitized_name}/{TEXT_UNITS_TABLE}") + relationships_df = get_df(f"abfs://{sanitized_name}/{RELATIONSHIPS_TABLE}") + covariates_df = None + 
try: + covariates_df = get_df(f"abfs://{sanitized_name}/{COVARIATES_TABLE}") + except Exception as e: + logger.warning(f"Covariates table not found: {e}") + + # load custom pipeline settings + ROOT_DIR = Path(__file__).resolve().parent.parent.parent / "scripts/settings.yaml" + + # layer the custom settings on top of the default configuration settings of graphrag + config: GraphRagConfig = load_config( + root_dir=ROOT_DIR.parent, + config_filepath=ROOT_DIR + ) + # update the config to use the correct blob storage containers + config.cache.container_name = index_names["sanitized_name"] + config.reporting.container_name = index_names["sanitized_name"] + config.output.container_name = index_names["sanitized_name"] + # dynamically assign the sanitized index name + config.vector_store["default_vector_store"].container_name = sanitized_name + + data = QueryData( + communities=communities_df, + community_reports=community_report_df, + entities=entities_df, + community_level=COMMUNITY_LEVEL, + config=config, + ) + if include_local_context: + # add local context to the data object + data.text_units = text_units_df + data.relationships = relationships_df + data.covariates = covariates_df + return data + + +def update_multi_index_context_data( + context_data, + index_name: str, + index_id: str, +): + """ + Update context data with the links dict so that it contains both the index name and community id. + + Parameters + ---------- + - context_data (str | list[pd.DataFrame] | dict[str, pd.DataFrame]): The context data to update. + - index_name (str): The name of the index. + - index_id (str): The id of the index. + + Returns + ------- + str | list[pd.DataFrame] | dict[str, pd.DataFrame]: The updated context data. 
+ """ + updated_context_data = {} + for key in context_data: + updated_entry = [] + if key == "reports": + updated_entry = [ + { + **entry, + "index_name": index_name, + "index_id": index_id, + } + for entry in context_data[key].to_dict(orient="records") + ] + if key == "entities": + updated_entry = [ + { + **entry, + "index_name": index_name, + "index_id": index_id, + } + for entry in context_data[key].to_dict(orient="records") + ] + if key == "relationships": + updated_entry = [ + { + **entry, + "index_name": index_name, + "index_id": index_id, + } + for entry in context_data[key].to_dict(orient="records") + ] + if key == "claims": + updated_entry = [ + { + **entry, + "index_name": index_name, + "index_id": index_id, + } + for entry in context_data[key].to_dict(orient="records") + ] + if key == "sources": + updated_entry = [ + { + **entry, + "index_name": index_name, + "index_id": index_id, + } + for entry in context_data[key].to_dict(orient="records") + ] + updated_context_data[key] = updated_entry + return updated_context_data + + async def create_cache(container_client: ContainerClient) -> None: """ Create a file cache (csv). 
diff --git a/backend/graphrag_app/utils/pipeline.py b/backend/graphrag_app/utils/pipeline.py index 1e1b3ab1..0236327a 100644 --- a/backend/graphrag_app/utils/pipeline.py +++ b/backend/graphrag_app/utils/pipeline.py @@ -7,6 +7,7 @@ ) from azure.cosmos.exceptions import CosmosHttpResponseError +from graphrag.config.enums import IndexingMethod from graphrag_app.typing.pipeline import PipelineJobState from graphrag_app.utils.azure_clients import AzureClientManager @@ -39,7 +40,9 @@ class PipelineJob: _entity_extraction_prompt: str = field(default=None, init=False) _entity_summarization_prompt: str = field(default=None, init=False) - _community_summarization_prompt: str = field(default=None, init=False) + _community_summarization_graph_prompt: str = field(default=None, init=False) + _community_summarization_text_prompt: str = field(default=None, init=False) + _indexing_method: str = field(default=IndexingMethod.Standard.value, init=False) @staticmethod def _jobs_container(): @@ -56,7 +59,9 @@ def create_item( human_readable_storage_name: str, entity_extraction_prompt: str | None = None, entity_summarization_prompt: str | None = None, - community_summarization_prompt: str | None = None, + community_summarization_graph_prompt: str | None = None, + community_summarization_text_prompt: str | None = None, + indexing_method: str = IndexingMethod.Standard.value, **kwargs, ) -> "PipelineJob": """ @@ -112,7 +117,10 @@ def create_item( instance._entity_extraction_prompt = entity_extraction_prompt instance._entity_summarization_prompt = entity_summarization_prompt - instance._community_summarization_prompt = community_summarization_prompt + instance._community_summarization_graph_prompt = community_summarization_graph_prompt + instance._community_summarization_text_prompt = community_summarization_text_prompt + + instance._indexing_method = IndexingMethod(indexing_method).value # Create the item in the database instance.update_db() @@ -160,9 +168,15 @@ def load_item(cls, id: str) 
-> "PipelineJob": instance._entity_summarization_prompt = db_item.get( "entity_summarization_prompt" ) - instance._community_summarization_prompt = db_item.get( - "community_summarization_prompt" + instance._community_summarization_graph_prompt = db_item.get( + "community_summarization_graph_prompt" + ) + instance._community_summarization_text_prompt = db_item.get( + "community_summarization_text_prompt" ) + + instance._indexing_method = db_item.get("indexing_method") + return instance @staticmethod @@ -200,14 +214,19 @@ def dump_model(self) -> dict: "status": self._status.value, "percent_complete": self._percent_complete, "progress": self._progress, + "indexing_method": self._indexing_method, } if self._entity_extraction_prompt: model["entity_extraction_prompt"] = self._entity_extraction_prompt if self._entity_summarization_prompt: model["entity_summarization_prompt"] = self._entity_summarization_prompt - if self._community_summarization_prompt: - model["community_summarization_prompt"] = ( - self._community_summarization_prompt + if self._community_summarization_graph_prompt: + model["community_summarization_graph_prompt"] = ( + self._community_summarization_graph_prompt + ) + if self._community_summarization_text_prompt: + model["community_summarization_text_prompt"] = ( + self._community_summarization_text_prompt ) return model @@ -291,14 +310,34 @@ def entity_summarization_prompt(self, entity_summarization_prompt: str) -> None: self.update_db() @property - def community_summarization_prompt(self) -> str: - return self._community_summarization_prompt + def community_summarization_graph_prompt(self) -> str: + return self._community_summarization_graph_prompt + + @community_summarization_graph_prompt.setter + def community_summarization_graph_prompt( + self, community_summarization_graph_prompt: str + ) -> None: + self._community_summarization_graph_prompt = community_summarization_graph_prompt + self.update_db() + + @property + def 
community_summarization_text_prompt(self) -> str: + return self._community_summarization_text_prompt - @community_summarization_prompt.setter - def community_summarization_prompt( - self, community_summarization_prompt: str + @community_summarization_text_prompt.setter + def community_summarization_text_prompt( + self, community_summarization_text_prompt: str ) -> None: - self._community_summarization_prompt = community_summarization_prompt + self._community_summarization_text_prompt = community_summarization_text_prompt + self.update_db() + + @property + def indexing_method(self) -> str: + return self._indexing_method + + @indexing_method.setter + def indexing_method(self, indexing_method: str) -> None: + self._indexing_method = IndexingMethod(indexing_method).value self.update_db() @property diff --git a/backend/poetry.lock b/backend/poetry.lock index 7dad142d..9bac044c 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "adlfs" @@ -208,18 +208,15 @@ trio = ["trio (>=0.26.1)"] [[package]] name = "anytree" -version = "2.12.1" +version = "2.13.0" description = "Powerful and Lightweight Python Tree Data Structure with various plugins" optional = false -python-versions = ">=3.7.2,<4" +python-versions = "<4.0,>=3.9.2" files = [ - {file = "anytree-2.12.1-py3-none-any.whl", hash = "sha256:5ea9e61caf96db1e5b3d0a914378d2cd83c269dfce1fb8242ce96589fa3382f0"}, - {file = "anytree-2.12.1.tar.gz", hash = "sha256:244def434ccf31b668ed282954e5d315b4e066c4940b94aff4a7962d85947830"}, + {file = "anytree-2.13.0-py3-none-any.whl", hash = "sha256:4cbcf10df36b1f1cba131b7e487ff3edafc9d6e932a3c70071b5b768bab901ff"}, + {file = "anytree-2.13.0.tar.gz", hash = "sha256:c9d3aa6825fdd06af7ebb05b4ef291d2db63e62bb1f9b7d9b71354be9d362714"}, ] -[package.dependencies] -six = "*" - [[package]] name = "appnope" version = "0.1.4" @@ -516,28 +513,28 @@ typing-extensions = ">=4.0.0" [[package]] name = "azure-monitor-opentelemetry" -version = "1.6.6" +version = "1.6.7" description = "Microsoft Azure Monitor Opentelemetry Distro Client Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure_monitor_opentelemetry-1.6.6-py3-none-any.whl", hash = "sha256:934bfaef0829714641bd7d68d9dd348b5fe8b44aca99c385431df8b1c414d6ee"}, - {file = "azure_monitor_opentelemetry-1.6.6.tar.gz", hash = "sha256:e27031f9759e1dfcf63e2c39b079a83e61c664c51e28044456cb3767851f91e5"}, + {file = "azure_monitor_opentelemetry-1.6.7-py3-none-any.whl", hash = "sha256:31ee068d0fc161cdb87d501f2e9e507a5316faf2c819136ac422b942a196ab90"}, + {file = "azure_monitor_opentelemetry-1.6.7.tar.gz", hash = "sha256:fa52dc679412cdfd64775cb73ff224b92412b2dc60be38db32368b7cbebe04cd"}, ] [package.dependencies] azure-core = ">=1.28.0,<2.0.0" azure-core-tracing-opentelemetry = ">=1.0.0b11,<1.1.0" azure-monitor-opentelemetry-exporter = ">=1.0.0b31,<1.1.0" -opentelemetry-instrumentation-django = ">=0.49b0,<1.0" 
-opentelemetry-instrumentation-fastapi = ">=0.49b0,<1.0" -opentelemetry-instrumentation-flask = ">=0.49b0,<1.0" -opentelemetry-instrumentation-psycopg2 = ">=0.49b0,<1.0" -opentelemetry-instrumentation-requests = ">=0.49b0,<1.0" -opentelemetry-instrumentation-urllib = ">=0.49b0,<1.0" -opentelemetry-instrumentation-urllib3 = ">=0.49b0,<1.0" +opentelemetry-instrumentation-django = ">=0.53b0,<1.0" +opentelemetry-instrumentation-fastapi = ">=0.53b0,<1.0" +opentelemetry-instrumentation-flask = ">=0.53b0,<1.0" +opentelemetry-instrumentation-psycopg2 = ">=0.53b0,<1.0" +opentelemetry-instrumentation-requests = ">=0.53b0,<1.0" +opentelemetry-instrumentation-urllib = ">=0.53b0,<1.0" +opentelemetry-instrumentation-urllib3 = ">=0.53b0,<1.0" opentelemetry-resource-detector-azure = ">=0.1.4,<0.2.0" -opentelemetry-sdk = ">=1.28,<2.0" +opentelemetry-sdk = ">=1.32,<2.0" [[package]] name = "azure-monitor-opentelemetry-exporter" @@ -668,6 +665,46 @@ webencodings = "*" [package.extras] css = ["tinycss2 (>=1.1.0,<1.5)"] +[[package]] +name = "blis" +version = "1.2.1" +description = "The Blis BLAS-like linear algebra library, as a self-contained C-extension." 
+optional = false +python-versions = "<3.13,>=3.6" +files = [ + {file = "blis-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112443b90698158ada38f71e74c079c3561e802554a51e9850d487c39db25de0"}, + {file = "blis-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b9f8c4fbc303f47778d1fd47916cae785b6f3beaa2031502112a8c0aa5eb29f6"}, + {file = "blis-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0260ecbbaa890f11d8c88e9ce37d4fc9a91839adc34ba1763ba89424362e54c9"}, + {file = "blis-1.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b70e0693564444b608d765727ab31618de3b92c5f203b9dc6b6a108170a8cea"}, + {file = "blis-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67ae48f73828cf38f65f24b6c6d8ec16f22c99820e0d13e7d97370682fdb023d"}, + {file = "blis-1.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9eff1af9b142fd156a7b83f513061f2e464c4409afb37080fde436e969951703"}, + {file = "blis-1.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d05f07fd37b407edb294322d3b2991b0950a61123076cc380d3e9c3deba77c83"}, + {file = "blis-1.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8d5abc324180918a4d7ef81f31c37907d13e85f2831317cba3edacd4ef9b7d39"}, + {file = "blis-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:8de9a1e536202064b57c60d09ff0886275b50c5878df6d58fb49c731eaf535a7"}, + {file = "blis-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:778c4f72b71f97187e3304acfbd30eab98c9ba1a5b03b65128bc3875400ae604"}, + {file = "blis-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c5f2ffb0ae9c1f5aaa95b9681bcdd9a777d007c501fa220796329b939ca2790"}, + {file = "blis-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db4dc5d2d57106bb411633603a5c7d178a0845267c3efc7e5ea4fa7a44772976"}, + {file = "blis-1.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c621271c2843101927407e052b35a67f853da59d5c74e9e070e982c7f82e2e04"}, + {file = "blis-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43f65f882250b817566d7543abd1f6da297f1662e5dd9936e14c04b88285a497"}, + {file = "blis-1.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:78a0613d559ccc426c101c67e8f84e1f93491e29d722c370872c538ee652bd07"}, + {file = "blis-1.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2f5e32e5e5635fc7087b724b53120dbcd86201f56c0405882ce254bc0e493392"}, + {file = "blis-1.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d339c97cc83f53e39c1013d0dcd7d5278c853dc102d931132eeb05b226e28429"}, + {file = "blis-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:8d284323cc994e9b818c32046f1aa3e57bcc41c74e02daebdf0d3bc3e14355cb"}, + {file = "blis-1.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1cd35e94a1a97b37b31b11f097f998a3a0e75ac06d57e6edf7d9597200f55756"}, + {file = "blis-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b6394d27f2259c580df8d13ebe9c0a188a6ace0a689e93d6e49cb15018d4d9c"}, + {file = "blis-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9c127159415dc772f345abc3575e1e2d02bb1ae7cb7f532267d67705be04c66"}, + {file = "blis-1.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f9fa589aa72448009fd5001afb05e69f3bc953fe778b44580fd7d79ee8201a1"}, + {file = "blis-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1aa6150259caf4fa0b527bfc8c1e858542f9ca88a386aa90b93e1ca4c2add6df"}, + {file = "blis-1.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3ba67c09883cae52da3d9e9d3f4305464efedd336032c4d5c6c429b27b16f4c1"}, + {file = "blis-1.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7d9c5fca21b01c4b2f3cb95b71ce7ef95e58b3b62f0d79d1f699178c72c1e03e"}, + {file = "blis-1.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:6952a4a1f15e0d1f73cc1206bd71368b32551f2e94852dae288b50c4ea0daf31"}, + {file = "blis-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:bd0360427b1669684cd35a8355be126d7a33992ccac6dcb1fbef5e100f4e3026"}, + {file = "blis-1.2.1.tar.gz", hash = "sha256:1066beedbedc2143c22bd28742658de05694afebacde8d8c2d14dd4b5a96765a"}, +] + +[package.dependencies] +numpy = {version = ">=1.19.0,<3.0.0", markers = "python_version >= \"3.9\""} + [[package]] name = "cachetools" version = "5.5.2" @@ -679,6 +716,17 @@ files = [ {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] +[[package]] +name = "catalogue" +version = "2.0.10" +description = "Super lightweight function registries for your library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "catalogue-2.0.10-py3-none-any.whl", hash = "sha256:58c2de0020aa90f4a2da7dfad161bf7b3b054c86a5f09fcedc0b2b740c109a9f"}, + {file = "catalogue-2.0.10.tar.gz", hash = "sha256:4f56daa940913d3f09d589c191c74e5a6d51762b3a9e37dd53b7437afd6cda15"}, +] + [[package]] name = "certifi" version = "2025.1.31" @@ -895,6 +943,26 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "cloudpathlib" +version = "0.21.0" +description = "pathlib-style classes for cloud storage services." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "cloudpathlib-0.21.0-py3-none-any.whl", hash = "sha256:657e95ecd2663f1123b6daa95d49aca4b4bc8a9fa90c07930bdba2c5e295e5ef"}, + {file = "cloudpathlib-0.21.0.tar.gz", hash = "sha256:fb8f6b890a3d37b35f0eabff86721bb8d35dfc6a6be98c1f4d34b19e989c6641"}, +] + +[package.dependencies] +typing-extensions = {version = ">4", markers = "python_version < \"3.11\""} + +[package.extras] +all = ["cloudpathlib[azure]", "cloudpathlib[gs]", "cloudpathlib[s3]"] +azure = ["azure-storage-blob (>=12)", "azure-storage-file-datalake (>=12)"] +gs = ["google-cloud-storage"] +s3 = ["boto3 (>=1.34.0)"] + [[package]] name = "cobble" version = "0.1.4" @@ -951,6 +1019,21 @@ traitlets = ">=4" [package.extras] test = ["pytest"] +[[package]] +name = "confection" +version = "0.1.5" +description = "The sweetest config system for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "confection-0.1.5-py3-none-any.whl", hash = "sha256:e29d3c3f8eac06b3f77eb9dfb4bf2fc6bcc9622a98ca00a698e3d019c6430b14"}, + {file = "confection-0.1.5.tar.gz", hash = "sha256:8e72dd3ca6bd4f48913cd220f10b8275978e740411654b6e8ca6d7008c590f0e"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<3.0.0" +srsly = ">=2.4.0,<3.0.0" + [[package]] name = "contourpy" version = "1.3.1" @@ -1276,39 +1359,84 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "cymem" +version = "2.0.11" +description = "Manage calls to calloc/free through Cython" +optional = false +python-versions = "*" +files = [ + {file = "cymem-2.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1b4dd8f8c2475c7c9948eefa89c790d83134600858d8d43b90276efd8df3882e"}, + {file = "cymem-2.0.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d46ba0d2e0f749195297d16f2286b55af7d7c084db2b853fdfccece2c000c5dc"}, + {file = 
"cymem-2.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739c4336b9d04ce9761851e9260ef77508d4a86ee3060e41302bfb6fa82c37de"}, + {file = "cymem-2.0.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a69c470c2fb118161f49761f9137384f46723c77078b659bba33858e19e46b49"}, + {file = "cymem-2.0.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:40159f6c92627438de970fd761916e745d70dfd84a7dcc28c1627eb49cee00d8"}, + {file = "cymem-2.0.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f503f98e6aa333fffbe657a6854f13a9c3de68860795ae21171284213b9c5c09"}, + {file = "cymem-2.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:7f05ed5920cc92d6b958ec5da55bd820d326fe9332b90660e6fa67e3b476ceb1"}, + {file = "cymem-2.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3ee54039aad3ef65de82d66c40516bf54586287b46d32c91ea0530c34e8a2745"}, + {file = "cymem-2.0.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c05ef75b5db217be820604e43a47ccbbafea98ab6659d07cea92fa3c864ea58"}, + {file = "cymem-2.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8d5381e5793ce531bac0dbc00829c8381f18605bb67e4b61d34f8850463da40"}, + {file = "cymem-2.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2b9d3f42d7249ac81802135cad51d707def058001a32f73fc7fbf3de7045ac7"}, + {file = "cymem-2.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:39b78f2195d20b75c2d465732f6b8e8721c5d4eb012777c2cb89bdb45a043185"}, + {file = "cymem-2.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2203bd6525a80d8fd0c94654a263af21c0387ae1d5062cceaebb652bf9bad7bc"}, + {file = "cymem-2.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:aa54af7314de400634448da1f935b61323da80a49484074688d344fb2036681b"}, + {file = "cymem-2.0.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a0fbe19ce653cd688842d81e5819dc63f911a26e192ef30b0b89f0ab2b192ff2"}, + {file = 
"cymem-2.0.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de72101dc0e6326f6a2f73e05a438d1f3c6110d41044236d0fbe62925091267d"}, + {file = "cymem-2.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee4395917f6588b8ac1699499128842768b391fe8896e8626950b4da5f9a406"}, + {file = "cymem-2.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02f2b17d760dc3fe5812737b1ce4f684641cdd751d67761d333a3b5ea97b83"}, + {file = "cymem-2.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:04ee6b4041ddec24512d6e969ed6445e57917f01e73b9dabbe17b7e6b27fef05"}, + {file = "cymem-2.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e1048dae7e627ee25f22c87bb670b13e06bc0aecc114b89b959a798d487d1bf4"}, + {file = "cymem-2.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:0c269c7a867d74adeb9db65fa1d226342aacf44d64b7931282f0b0eb22eb6275"}, + {file = "cymem-2.0.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4a311c82f743275c84f708df89ac5bf60ddefe4713d532000c887931e22941f"}, + {file = "cymem-2.0.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:02ed92bead896cca36abad00502b14fa651bdf5d8319461126a2d5ac8c9674c5"}, + {file = "cymem-2.0.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44ddd3588379f8f376116384af99e3fb5f90091d90f520c341942618bf22f05e"}, + {file = "cymem-2.0.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ec985623624bbd298762d8163fc194a096cb13282731a017e09ff8a60bb8b1"}, + {file = "cymem-2.0.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3385a47285435848e0ed66cfd29b35f3ed8703218e2b17bd7a0c053822f26bf"}, + {file = "cymem-2.0.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5461e65340d6572eb64deadce79242a446a1d39cb7bf70fe7b7e007eb0d799b0"}, + {file = "cymem-2.0.11-cp313-cp313-win_amd64.whl", hash = "sha256:25da111adf425c29af0cfd9fecfec1c71c8d82e2244a85166830a0817a66ada7"}, + {file = 
"cymem-2.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1450498623d9f176d48578779c4e9d133c7f252f73c5a93b762f35d059a09398"}, + {file = "cymem-2.0.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a407fd8766e1f666c48cb232f760267cecf0acb04cc717d8ec4de6adc6ab8e0"}, + {file = "cymem-2.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6347aed08442679a57bcce5ad1e338f6b717e46654549c5d65c798552d910591"}, + {file = "cymem-2.0.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d8f11149b1a154de0e93f5eda0a13ad9948a739b58a2aace996ca41bbb6d0f5"}, + {file = "cymem-2.0.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7a2b4d1a9b1674d6ac0e4c5136b70b805535dc8d1060aa7c4ded3e52fb74e615"}, + {file = "cymem-2.0.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dec13c1a84612815365939f59e128a0031cae5f6b5a86e4b8fd7c4efa3fad262"}, + {file = "cymem-2.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:332ea5bc1c13c9a186532a06846881288eb846425898b70f047a0820714097bf"}, + {file = "cymem-2.0.11.tar.gz", hash = "sha256:efe49a349d4a518be6b6c6b255d4a80f740a341544bde1a807707c058b88d0bd"}, +] + [[package]] name = "debugpy" -version = "1.8.13" +version = "1.8.14" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.13-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:06859f68e817966723ffe046b896b1bd75c665996a77313370336ee9e1de3e90"}, - {file = "debugpy-1.8.13-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c2db69fb8df3168bc857d7b7d2494fed295dfdbde9a45f27b4b152f37520"}, - {file = "debugpy-1.8.13-cp310-cp310-win32.whl", hash = "sha256:46abe0b821cad751fc1fb9f860fb2e68d75e2c5d360986d0136cd1db8cad4428"}, - {file = "debugpy-1.8.13-cp310-cp310-win_amd64.whl", hash = "sha256:dc7b77f5d32674686a5f06955e4b18c0e41fb5a605f5b33cf225790f114cfeec"}, - {file = 
"debugpy-1.8.13-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:eee02b2ed52a563126c97bf04194af48f2fe1f68bb522a312b05935798e922ff"}, - {file = "debugpy-1.8.13-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4caca674206e97c85c034c1efab4483f33971d4e02e73081265ecb612af65377"}, - {file = "debugpy-1.8.13-cp311-cp311-win32.whl", hash = "sha256:7d9a05efc6973b5aaf076d779cf3a6bbb1199e059a17738a2aa9d27a53bcc888"}, - {file = "debugpy-1.8.13-cp311-cp311-win_amd64.whl", hash = "sha256:62f9b4a861c256f37e163ada8cf5a81f4c8d5148fc17ee31fb46813bd658cdcc"}, - {file = "debugpy-1.8.13-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:2b8de94c5c78aa0d0ed79023eb27c7c56a64c68217d881bee2ffbcb13951d0c1"}, - {file = "debugpy-1.8.13-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887d54276cefbe7290a754424b077e41efa405a3e07122d8897de54709dbe522"}, - {file = "debugpy-1.8.13-cp312-cp312-win32.whl", hash = "sha256:3872ce5453b17837ef47fb9f3edc25085ff998ce63543f45ba7af41e7f7d370f"}, - {file = "debugpy-1.8.13-cp312-cp312-win_amd64.whl", hash = "sha256:63ca7670563c320503fea26ac688988d9d6b9c6a12abc8a8cf2e7dd8e5f6b6ea"}, - {file = "debugpy-1.8.13-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:31abc9618be4edad0b3e3a85277bc9ab51a2d9f708ead0d99ffb5bb750e18503"}, - {file = "debugpy-1.8.13-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0bd87557f97bced5513a74088af0b84982b6ccb2e254b9312e29e8a5c4270eb"}, - {file = "debugpy-1.8.13-cp313-cp313-win32.whl", hash = "sha256:5268ae7fdca75f526d04465931cb0bd24577477ff50e8bb03dab90983f4ebd02"}, - {file = "debugpy-1.8.13-cp313-cp313-win_amd64.whl", hash = "sha256:79ce4ed40966c4c1631d0131606b055a5a2f8e430e3f7bf8fd3744b09943e8e8"}, - {file = "debugpy-1.8.13-cp38-cp38-macosx_14_0_x86_64.whl", hash = 
"sha256:acf39a6e98630959763f9669feddee540745dfc45ad28dbc9bd1f9cd60639391"}, - {file = "debugpy-1.8.13-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:924464d87e7d905eb0d79fb70846558910e906d9ee309b60c4fe597a2e802590"}, - {file = "debugpy-1.8.13-cp38-cp38-win32.whl", hash = "sha256:3dae443739c6b604802da9f3e09b0f45ddf1cf23c99161f3a1a8039f61a8bb89"}, - {file = "debugpy-1.8.13-cp38-cp38-win_amd64.whl", hash = "sha256:ed93c3155fc1f888ab2b43626182174e457fc31b7781cd1845629303790b8ad1"}, - {file = "debugpy-1.8.13-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:6fab771639332bd8ceb769aacf454a30d14d7a964f2012bf9c4e04c60f16e85b"}, - {file = "debugpy-1.8.13-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32b6857f8263a969ce2ca098f228e5cc0604d277447ec05911a8c46cf3e7e307"}, - {file = "debugpy-1.8.13-cp39-cp39-win32.whl", hash = "sha256:f14d2c4efa1809da125ca62df41050d9c7cd9cb9e380a2685d1e453c4d450ccb"}, - {file = "debugpy-1.8.13-cp39-cp39-win_amd64.whl", hash = "sha256:ea869fe405880327497e6945c09365922c79d2a1eed4c3ae04d77ac7ae34b2b5"}, - {file = "debugpy-1.8.13-py2.py3-none-any.whl", hash = "sha256:d4ba115cdd0e3a70942bd562adba9ec8c651fe69ddde2298a1be296fc331906f"}, - {file = "debugpy-1.8.13.tar.gz", hash = "sha256:837e7bef95bdefba426ae38b9a94821ebdc5bea55627879cd48165c90b9e50ce"}, + {file = "debugpy-1.8.14-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:93fee753097e85623cab1c0e6a68c76308cd9f13ffdf44127e6fab4fbf024339"}, + {file = "debugpy-1.8.14-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d937d93ae4fa51cdc94d3e865f535f185d5f9748efb41d0d49e33bf3365bd79"}, + {file = "debugpy-1.8.14-cp310-cp310-win32.whl", hash = "sha256:c442f20577b38cc7a9aafecffe1094f78f07fb8423c3dddb384e6b8f49fd2987"}, + {file = "debugpy-1.8.14-cp310-cp310-win_amd64.whl", hash = 
"sha256:f117dedda6d969c5c9483e23f573b38f4e39412845c7bc487b6f2648df30fe84"}, + {file = "debugpy-1.8.14-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:1b2ac8c13b2645e0b1eaf30e816404990fbdb168e193322be8f545e8c01644a9"}, + {file = "debugpy-1.8.14-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf431c343a99384ac7eab2f763980724834f933a271e90496944195318c619e2"}, + {file = "debugpy-1.8.14-cp311-cp311-win32.whl", hash = "sha256:c99295c76161ad8d507b413cd33422d7c542889fbb73035889420ac1fad354f2"}, + {file = "debugpy-1.8.14-cp311-cp311-win_amd64.whl", hash = "sha256:7816acea4a46d7e4e50ad8d09d963a680ecc814ae31cdef3622eb05ccacf7b01"}, + {file = "debugpy-1.8.14-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:8899c17920d089cfa23e6005ad9f22582fd86f144b23acb9feeda59e84405b84"}, + {file = "debugpy-1.8.14-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6bb5c0dcf80ad5dbc7b7d6eac484e2af34bdacdf81df09b6a3e62792b722826"}, + {file = "debugpy-1.8.14-cp312-cp312-win32.whl", hash = "sha256:281d44d248a0e1791ad0eafdbbd2912ff0de9eec48022a5bfbc332957487ed3f"}, + {file = "debugpy-1.8.14-cp312-cp312-win_amd64.whl", hash = "sha256:5aa56ef8538893e4502a7d79047fe39b1dae08d9ae257074c6464a7b290b806f"}, + {file = "debugpy-1.8.14-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:329a15d0660ee09fec6786acdb6e0443d595f64f5d096fc3e3ccf09a4259033f"}, + {file = "debugpy-1.8.14-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f920c7f9af409d90f5fd26e313e119d908b0dd2952c2393cd3247a462331f15"}, + {file = "debugpy-1.8.14-cp313-cp313-win32.whl", hash = "sha256:3784ec6e8600c66cbdd4ca2726c72d8ca781e94bce2f396cc606d458146f8f4e"}, + {file = "debugpy-1.8.14-cp313-cp313-win_amd64.whl", hash = "sha256:684eaf43c95a3ec39a96f1f5195a7ff3d4144e4a18d69bb66beeb1a6de605d6e"}, + {file = 
"debugpy-1.8.14-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:d5582bcbe42917bc6bbe5c12db1bffdf21f6bfc28d4554b738bf08d50dc0c8c3"}, + {file = "debugpy-1.8.14-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5349b7c3735b766a281873fbe32ca9cca343d4cc11ba4a743f84cb854339ff35"}, + {file = "debugpy-1.8.14-cp38-cp38-win32.whl", hash = "sha256:7118d462fe9724c887d355eef395fae68bc764fd862cdca94e70dcb9ade8a23d"}, + {file = "debugpy-1.8.14-cp38-cp38-win_amd64.whl", hash = "sha256:d235e4fa78af2de4e5609073972700523e372cf5601742449970110d565ca28c"}, + {file = "debugpy-1.8.14-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:413512d35ff52c2fb0fd2d65e69f373ffd24f0ecb1fac514c04a668599c5ce7f"}, + {file = "debugpy-1.8.14-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c9156f7524a0d70b7a7e22b2e311d8ba76a15496fb00730e46dcdeedb9e1eea"}, + {file = "debugpy-1.8.14-cp39-cp39-win32.whl", hash = "sha256:b44985f97cc3dd9d52c42eb59ee9d7ee0c4e7ecd62bca704891f997de4cef23d"}, + {file = "debugpy-1.8.14-cp39-cp39-win_amd64.whl", hash = "sha256:b1528cfee6c1b1c698eb10b6b096c598738a8238822d218173d21c3086de8123"}, + {file = "debugpy-1.8.14-py2.py3-none-any.whl", hash = "sha256:5cd9a579d553b6cb9759a7908a41988ee6280b961f24f63336835d9418216a20"}, + {file = "debugpy-1.8.14.tar.gz", hash = "sha256:7cd287184318416850aa8b60ac90105837bb1e59531898c07569d197d2ed5322"}, ] [[package]] @@ -1569,6 +1697,7 @@ files = [ {file = "fastparquet-2024.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2d7f02f57231e6c86d26e9ea71953737202f20e948790e5d4db6d6a1a150dc"}, {file = "fastparquet-2024.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fbe4468146b633d8f09d7b196fea0547f213cb5ce5f76e9d1beb29eaa9593a93"}, {file = "fastparquet-2024.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:29d5c718817bcd765fc519b17f759cad4945974421ecc1931d3bdc3e05e57fa9"}, + {file = "fastparquet-2024.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:74a0b3c40ab373442c0fda96b75a36e88745d8b138fcc3a6143e04682cbbb8ca"}, {file = "fastparquet-2024.11.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:59e5c5b51083d5b82572cdb7aed0346e3181e3ac9d2e45759da2e804bdafa7ee"}, {file = "fastparquet-2024.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdadf7b6bad789125b823bfc5b0a719ba5c4a2ef965f973702d3ea89cff057f6"}, {file = "fastparquet-2024.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46b2db02fc2a1507939d35441c8ab211d53afd75d82eec9767d1c3656402859b"}, @@ -1655,24 +1784,29 @@ files = [ [[package]] name = "fnllm" -version = "0.0.10" +version = "0.2.3" description = "A function-based LLM protocol and wrapper." optional = false python-versions = ">=3.10" files = [ - {file = "fnllm-0.0.10-py3-none-any.whl", hash = "sha256:e676001d9b0ebbe194590393d427385760adaefcab6a456268e4f13a0e9d2cb6"}, - {file = "fnllm-0.0.10.tar.gz", hash = "sha256:ece859432b83a462dc35db6483f36313ff935b79f437186daa44e3679f4f49cf"}, + {file = "fnllm-0.2.3-py3-none-any.whl", hash = "sha256:7fefdbea16a24651377d6ed6262353ee1606b1243db275ba9d67e9da43f23372"}, + {file = "fnllm-0.2.3.tar.gz", hash = "sha256:cec5c0322c65b0e563cccf1628eb8d69efc7e52ce004a4c62853712db38966bd"}, ] [package.dependencies] aiolimiter = ">=1.1.0" +azure-identity = {version = ">=1.17.1", optional = true, markers = "extra == \"azure\""} +azure-storage-blob = {version = ">=12.20.0", optional = true, markers = "extra == \"azure\""} httpx = ">=0.27.0" json-repair = ">=0.30.0" +openai = {version = ">=1.35.12", optional = true, markers = "extra == \"openai\""} pydantic = ">=2.8.2" tenacity = ">=8.5.0" +tiktoken = {version = ">=0.7.0", optional = true, markers = "extra == \"openai\""} [package.extras] azure = ["azure-identity (>=1.17.1)", "azure-storage-blob (>=12.20.0)"] +numpy = ["numpy 
(>=1.26.4)"] openai = ["openai (>=1.35.12)", "tiktoken (>=0.7.0)"] [[package]] @@ -1982,45 +2116,49 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "graphrag" -version = "1.2.0" +version = "2.1.0" description = "GraphRAG: A graph-based retrieval-augmented generation (RAG) system." optional = false -python-versions = "<3.13,>=3.10" -files = [ - {file = "graphrag-1.2.0-py3-none-any.whl", hash = "sha256:7f7312d57122a3f100e60ff123b7034faaf62eee3fd2d859418e3546118a571d"}, - {file = "graphrag-1.2.0.tar.gz", hash = "sha256:c1396cdd48fd67384bd40ae9aec39a65ecaece4b7d1cf7706a270034d8e87a53"}, -] - -[package.dependencies] -aiofiles = ">=24.1.0,<25.0.0" -azure-cosmos = ">=4.9.0,<5.0.0" -azure-identity = ">=1.19.0,<2.0.0" -azure-search-documents = ">=11.5.2,<12.0.0" -azure-storage-blob = ">=12.24.0,<13.0.0" -devtools = ">=0.12.2,<0.13.0" -environs = ">=11.0.0,<12.0.0" -fnllm = ">=0.0.10,<0.0.11" -future = ">=1.0.0,<2.0.0" -graspologic = ">=3.4.1,<4.0.0" -httpx = ">=0.28.1,<0.29.0" -json-repair = ">=0.30.3,<0.31.0" -lancedb = ">=0.17.0,<0.18.0" -networkx = ">=3.4.2,<4.0.0" +python-versions = ">=3.10,<3.13" +files = [] +develop = false + +[package.dependencies] +aiofiles = "^24.1.0" +azure-cosmos = "^4.9.0" +azure-identity = "^1.19.0" +azure-search-documents = "^11.5.2" +azure-storage-blob = "^12.24.0" +devtools = "^0.12.2" +environs = "^11.0.0" +fnllm = {version = "0.2.3", extras = ["azure", "openai"]} +future = "^1.0.0" +graspologic = "^3.4.1" +json-repair = "^0.30.3" +lancedb = "^0.17.0" +networkx = "^3.4.2" nltk = "3.9.1" -numpy = ">=1.25.2,<2.0.0" -openai = ">=1.57.0,<2.0.0" -pandas = ">=2.2.3,<3.0.0" -pyarrow = ">=15.0.0,<16.0.0" -pydantic = ">=2.10.3,<3.0.0" -python-dotenv = ">=1.0.1,<2.0.0" -pyyaml = ">=6.0.2,<7.0.0" -rich = ">=13.9.4,<14.0.0" -tenacity = ">=9.0.0,<10.0.0" -tiktoken = ">=0.8.0,<0.9.0" -tqdm = ">=4.67.1,<5.0.0" -typer = ">=0.15.1,<0.16.0" -typing-extensions = ">=4.12.2,<5.0.0" -umap-learn = ">=0.5.6,<0.6.0" +numpy = "^1.25.2" +openai = 
"^1.57.0" +pandas = "^2.2.3" +pyarrow = "^15.0.0" +pydantic = "^2.10.3" +python-dotenv = "^1.0.1" +pyyaml = "^6.0.2" +rich = "^13.9.4" +spacy = "^3.8.4" +textblob = "^0.18.0.post0" +tiktoken = "^0.8.0" +tqdm = "^4.67.1" +typer = "^0.15.1" +typing-extensions = "^4.12.2" +umap-learn = "^0.5.6" + +[package.source] +type = "git" +url = "https://github.com/microsoft/graphrag.git" +reference = "ffd8db7104defdcd131e5af38473d2f0815e3cf7" +resolved_reference = "ffd8db7104defdcd131e5af38473d2f0815e3cf7" [[package]] name = "graspologic" @@ -2287,21 +2425,21 @@ test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "n [[package]] name = "ipywidgets" -version = "8.1.5" +version = "8.1.6" description = "Jupyter interactive widgets" optional = false python-versions = ">=3.7" files = [ - {file = "ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245"}, - {file = "ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17"}, + {file = "ipywidgets-8.1.6-py3-none-any.whl", hash = "sha256:446e7630a1d025bdc7635e1169fcc06f2ce33b5bd41c2003edeb4a47c8d4bbb1"}, + {file = "ipywidgets-8.1.6.tar.gz", hash = "sha256:d8ace49c66f14419fc66071371b99d01bed230bbc15d8a60233b18bfbd782851"}, ] [package.dependencies] comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.12,<3.1.0" +jupyterlab_widgets = ">=3.0.14,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.12,<4.1.0" +widgetsnbextension = ">=4.0.14,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] @@ -2794,13 +2932,13 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v [[package]] name = "jupyterlab-widgets" -version = "3.0.13" +version = "3.0.14" description = "Jupyter interactive widgets for JupyterLab" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash 
= "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, - {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, + {file = "jupyterlab_widgets-3.0.14-py3-none-any.whl", hash = "sha256:54c33e3306b7fca139d165d6190dc6c0627aafa5d14adfc974a4e9a3d26cb703"}, + {file = "jupyterlab_widgets-3.0.14.tar.gz", hash = "sha256:bad03e59546869f026e537e0d170e454259e6dc7048e14041707ca31e523c8a1"}, ] [[package]] @@ -2950,6 +3088,42 @@ docs = ["mkdocs", "mkdocs-jupyter", "mkdocs-material", "mkdocstrings[python]"] embeddings = ["awscli (>=1.29.57)", "boto3 (>=1.28.57)", "botocore (>=1.31.57)", "cohere", "google-generativeai", "huggingface-hub", "ibm-watsonx-ai (>=1.1.2)", "instructorembedding", "ollama", "open-clip-torch", "openai (>=1.6.1)", "pillow", "requests (>=2.31.0)", "sentence-transformers", "torch"] tests = ["aiohttp", "boto3", "duckdb", "pandas (>=1.4)", "polars (>=0.19,<=1.3.0)", "pytest", "pytest-asyncio", "pytest-mock", "pytz", "tantivy"] +[[package]] +name = "langcodes" +version = "3.5.0" +description = "Tools for labeling human languages with IETF language tags" +optional = false +python-versions = ">=3.9" +files = [ + {file = "langcodes-3.5.0-py3-none-any.whl", hash = "sha256:853c69d1a35e0e13da2f427bb68fb2fa4a8f4fb899e0c62ad8df8d073dcfed33"}, + {file = "langcodes-3.5.0.tar.gz", hash = "sha256:1eef8168d07e51e131a2497ffecad4b663f6208e7c3ae3b8dc15c51734a6f801"}, +] + +[package.dependencies] +language-data = ">=1.2" + +[package.extras] +build = ["build", "twine"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "language-data" +version = "1.3.0" +description = "Supplementary data about languages used by the langcodes module" +optional = false +python-versions = "*" +files = [ + {file = "language_data-1.3.0-py3-none-any.whl", hash = "sha256:e2ee943551b5ae5f89cd0e801d1fc3835bb0ef5b7e9c3a4e8e17b2b214548fbf"}, + {file = "language_data-1.3.0.tar.gz", hash = 
"sha256:7600ef8aa39555145d06c89f0c324bf7dab834ea0b0a439d8243762e3ebad7ec"}, +] + +[package.dependencies] +marisa-trie = ">=1.1.0" + +[package.extras] +build = ["build", "twine"] +test = ["pytest", "pytest-cov"] + [[package]] name = "llvmlite" version = "0.44.0" @@ -3168,6 +3342,97 @@ files = [ [package.dependencies] cobble = ">=0.1.3,<0.2" +[[package]] +name = "marisa-trie" +version = "1.2.1" +description = "Static memory-efficient and fast Trie-like structures for Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "marisa_trie-1.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a2eb41d2f9114d8b7bd66772c237111e00d2bae2260824560eaa0a1e291ce9e8"}, + {file = "marisa_trie-1.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e956e6a46f604b17d570901e66f5214fb6f658c21e5e7665deace236793cef6"}, + {file = "marisa_trie-1.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bd45142501300e7538b2e544905580918b67b1c82abed1275fe4c682c95635fa"}, + {file = "marisa_trie-1.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8443d116c612cfd1961fbf76769faf0561a46d8e317315dd13f9d9639ad500c"}, + {file = "marisa_trie-1.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:875a6248e60fbb48d947b574ffa4170f34981f9e579bde960d0f9a49ea393ecc"}, + {file = "marisa_trie-1.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:746a7c60a17fccd3cfcfd4326926f02ea4fcdfc25d513411a0c4fc8e4a1ca51f"}, + {file = "marisa_trie-1.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e70869737cc0e5bd903f620667da6c330d6737048d1f44db792a6af68a1d35be"}, + {file = "marisa_trie-1.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06b099dd743676dbcd8abd8465ceac8f6d97d8bfaabe2c83b965495523b4cef2"}, + {file = "marisa_trie-1.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d2a82eb21afdaf22b50d9b996472305c05ca67fc4ff5a026a220320c9c961db6"}, + {file = 
"marisa_trie-1.2.1-cp310-cp310-win32.whl", hash = "sha256:8951e7ce5d3167fbd085703b4cbb3f47948ed66826bef9a2173c379508776cf5"}, + {file = "marisa_trie-1.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:5685a14b3099b1422c4f59fa38b0bf4b5342ee6cc38ae57df9666a0b28eeaad3"}, + {file = "marisa_trie-1.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ed3fb4ed7f2084597e862bcd56c56c5529e773729a426c083238682dba540e98"}, + {file = "marisa_trie-1.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe69fb9ffb2767746181f7b3b29bbd3454d1d24717b5958e030494f3d3cddf3"}, + {file = "marisa_trie-1.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4728ed3ae372d1ea2cdbd5eaa27b8f20a10e415d1f9d153314831e67d963f281"}, + {file = "marisa_trie-1.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cf4f25cf895692b232f49aa5397af6aba78bb679fb917a05fce8d3cb1ee446d"}, + {file = "marisa_trie-1.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cca7f96236ffdbf49be4b2e42c132e3df05968ac424544034767650913524de"}, + {file = "marisa_trie-1.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7eb20bf0e8b55a58d2a9b518aabc4c18278787bdba476c551dd1c1ed109e509"}, + {file = "marisa_trie-1.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b1ec93f0d1ee6d7ab680a6d8ea1a08bf264636358e92692072170032dda652ba"}, + {file = "marisa_trie-1.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e2699255d7ac610dee26d4ae7bda5951d05c7d9123a22e1f7c6a6f1964e0a4e4"}, + {file = "marisa_trie-1.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c484410911182457a8a1a0249d0c09c01e2071b78a0a8538cd5f7fa45589b13a"}, + {file = "marisa_trie-1.2.1-cp311-cp311-win32.whl", hash = "sha256:ad548117744b2bcf0e3d97374608be0a92d18c2af13d98b728d37cd06248e571"}, + {file = "marisa_trie-1.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:436f62d27714970b9cdd3b3c41bdad046f260e62ebb0daa38125ef70536fc73b"}, + 
{file = "marisa_trie-1.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:638506eacf20ca503fff72221a7e66a6eadbf28d6a4a6f949fcf5b1701bb05ec"}, + {file = "marisa_trie-1.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de1665eaafefa48a308e4753786519888021740501a15461c77bdfd57638e6b4"}, + {file = "marisa_trie-1.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f713af9b8aa66a34cd3a78c7d150a560a75734713abe818a69021fd269e927fa"}, + {file = "marisa_trie-1.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2a7d00f53f4945320b551bccb826b3fb26948bde1a10d50bb9802fabb611b10"}, + {file = "marisa_trie-1.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98042040d1d6085792e8d0f74004fc0f5f9ca6091c298f593dd81a22a4643854"}, + {file = "marisa_trie-1.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6532615111eec2c79e711965ece0bc95adac1ff547a7fff5ffca525463116deb"}, + {file = "marisa_trie-1.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:20948e40ab2038e62b7000ca6b4a913bc16c91a2c2e6da501bd1f917eeb28d51"}, + {file = "marisa_trie-1.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66b23e5b35dd547f85bf98db7c749bc0ffc57916ade2534a6bbc32db9a4abc44"}, + {file = "marisa_trie-1.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6704adf0247d2dda42e876b793be40775dff46624309ad99bc7537098bee106d"}, + {file = "marisa_trie-1.2.1-cp312-cp312-win32.whl", hash = "sha256:3ad356442c2fea4c2a6f514738ddf213d23930f942299a2b2c05df464a00848a"}, + {file = "marisa_trie-1.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2806f75817392cedcacb24ac5d80b0350dde8d3861d67d045c1d9b109764114"}, + {file = "marisa_trie-1.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b5ea16e69bfda0ac028c921b58de1a4aaf83d43934892977368579cd3c0a2554"}, + {file = "marisa_trie-1.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:9f627f4e41be710b6cb6ed54b0128b229ac9d50e2054d9cde3af0fef277c23cf"}, + {file = "marisa_trie-1.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5e649f3dc8ab5476732094f2828cc90cac3be7c79bc0c8318b6fda0c1d248db4"}, + {file = "marisa_trie-1.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46e528ee71808c961baf8c3ce1c46a8337ec7a96cc55389d11baafe5b632f8e9"}, + {file = "marisa_trie-1.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36aa4401a1180615f74d575571a6550081d84fc6461e9aefc0bb7b2427af098e"}, + {file = "marisa_trie-1.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce59bcd2cda9bb52b0e90cc7f36413cd86c3d0ce7224143447424aafb9f4aa48"}, + {file = "marisa_trie-1.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f4cd800704a5fc57e53c39c3a6b0c9b1519ebdbcb644ede3ee67a06eb542697d"}, + {file = "marisa_trie-1.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2428b495003c189695fb91ceeb499f9fcced3a2dce853e17fa475519433c67ff"}, + {file = "marisa_trie-1.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:735c363d9aaac82eaf516a28f7c6b95084c2e176d8231c87328dc80e112a9afa"}, + {file = "marisa_trie-1.2.1-cp313-cp313-win32.whl", hash = "sha256:eba6ca45500ca1a042466a0684aacc9838e7f20fe2605521ee19f2853062798f"}, + {file = "marisa_trie-1.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:aa7cd17e1c690ce96c538b2f4aae003d9a498e65067dd433c52dd069009951d4"}, + {file = "marisa_trie-1.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5e43891a37b0d7f618819fea14bd951289a0a8e3dd0da50c596139ca83ebb9b1"}, + {file = "marisa_trie-1.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6946100a43f933fad6bc458c502a59926d80b321d5ac1ed2ff9c56605360496f"}, + {file = "marisa_trie-1.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4177dc0bd1374e82be9b2ba4d0c2733b0a85b9d154ceeea83a5bee8c1e62fbf"}, + 
{file = "marisa_trie-1.2.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f35c2603a6be168088ed1db6ad1704b078aa8f39974c60888fbbced95dcadad4"}, + {file = "marisa_trie-1.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d659fda873d8dcb2c14c2c331de1dee21f5a902d7f2de7978b62c6431a8850ef"}, + {file = "marisa_trie-1.2.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:b0ef26733d3c836be79e812071e1a431ce1f807955a27a981ebb7993d95f842b"}, + {file = "marisa_trie-1.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:536ea19ce6a2ce61c57fed4123ecd10d18d77a0db45cd2741afff2b8b68f15b3"}, + {file = "marisa_trie-1.2.1-cp37-cp37m-win32.whl", hash = "sha256:0ee6cf6a16d9c3d1c94e21c8e63c93d8b34bede170ca4e937e16e1c0700d399f"}, + {file = "marisa_trie-1.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7e7b1786e852e014d03e5f32dbd991f9a9eb223dd3fa9a2564108b807e4b7e1c"}, + {file = "marisa_trie-1.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:952af3a5859c3b20b15a00748c36e9eb8316eb2c70bd353ae1646da216322908"}, + {file = "marisa_trie-1.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24a81aa7566e4ec96fc4d934581fe26d62eac47fc02b35fa443a0bb718b471e8"}, + {file = "marisa_trie-1.2.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9c9b32b14651a6dcf9e8857d2df5d29d322a1ea8c0be5c8ffb88f9841c4ec62b"}, + {file = "marisa_trie-1.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ac170d20b97beb75059ba65d1ccad6b434d777c8992ab41ffabdade3b06dd74"}, + {file = "marisa_trie-1.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da4e4facb79614cc4653cfd859f398e4db4ca9ab26270ff12610e50ed7f1f6c6"}, + {file = "marisa_trie-1.2.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25688f34cac3bec01b4f655ffdd6c599a01f0bd596b4a79cf56c6f01a7df3560"}, + {file = "marisa_trie-1.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:1db3213b451bf058d558f6e619bceff09d1d130214448a207c55e1526e2773a1"}, + {file = "marisa_trie-1.2.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d5648c6dcc5dc9200297fb779b1663b8a4467bda034a3c69bd9c32d8afb33b1d"}, + {file = "marisa_trie-1.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5bd39a4e1cc839a88acca2889d17ebc3f202a5039cd6059a13148ce75c8a6244"}, + {file = "marisa_trie-1.2.1-cp38-cp38-win32.whl", hash = "sha256:594f98491a96c7f1ffe13ce292cef1b4e63c028f0707effdea0f113364c1ae6c"}, + {file = "marisa_trie-1.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:5fe5a286f997848a410eebe1c28657506adaeb405220ee1e16cfcfd10deb37f2"}, + {file = "marisa_trie-1.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c0fe2ace0cb1806badbd1c551a8ec2f8d4cf97bf044313c082ef1acfe631ddca"}, + {file = "marisa_trie-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:67f0c2ec82c20a02c16fc9ba81dee2586ef20270127c470cb1054767aa8ba310"}, + {file = "marisa_trie-1.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a3c98613180cf1730e221933ff74b454008161b1a82597e41054127719964188"}, + {file = "marisa_trie-1.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:429858a0452a7bedcf67bc7bb34383d00f666c980cb75a31bcd31285fbdd4403"}, + {file = "marisa_trie-1.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2eacb84446543082ec50f2fb563f1a94c96804d4057b7da8ed815958d0cdfbe"}, + {file = "marisa_trie-1.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:852d7bcf14b0c63404de26e7c4c8d5d65ecaeca935e93794331bc4e2f213660b"}, + {file = "marisa_trie-1.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e58788004adda24c401d1751331618ed20c507ffc23bfd28d7c0661a1cf0ad16"}, + {file = "marisa_trie-1.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aefe0973cc4698e0907289dc0517ab0c7cdb13d588201932ff567d08a50b0e2e"}, + {file = "marisa_trie-1.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", 
hash = "sha256:6c50c861faad0a5c091bd763e0729f958c316e678dfa065d3984fbb9e4eacbcd"}, + {file = "marisa_trie-1.2.1-cp39-cp39-win32.whl", hash = "sha256:b1ce340da608530500ab4f963f12d6bfc8d8680900919a60dbdc9b78c02060a4"}, + {file = "marisa_trie-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:ce37d8ca462bb64cc13f529b9ed92f7b21fe8d1f1679b62e29f9cb7d0e888b49"}, + {file = "marisa_trie-1.2.1.tar.gz", hash = "sha256:3a27c408e2aefc03e0f1d25b2ff2afb85aac3568f6fa2ae2a53b57a2e87ce29d"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +test = ["hypothesis", "pytest", "readme-renderer"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -3521,108 +3786,165 @@ async = ["aiodns", "aiohttp (>=3.0)"] [[package]] name = "multidict" -version = "6.3.2" +version = "6.4.3" description = "multidict implementation" optional = false python-versions = ">=3.9" files = [ - {file = "multidict-6.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8b3dc0eec9304fa04d84a51ea13b0ec170bace5b7ddeaac748149efd316f1504"}, - {file = "multidict-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9534f3d84addd3b6018fa83f97c9d4247aaa94ac917d1ed7b2523306f99f5c16"}, - {file = "multidict-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a003ce1413ae01f0b8789c1c987991346a94620a4d22210f7a8fe753646d3209"}, - {file = "multidict-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b43f7384e68b1b982c99f489921a459467b5584bdb963b25e0df57c9039d0ad"}, - {file = "multidict-6.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d142ae84047262dc75c1f92eaf95b20680f85ce11d35571b4c97e267f96fadc4"}, - {file = "multidict-6.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec7e86fbc48aa1d6d686501a8547818ba8d645e7e40eaa98232a5d43ee4380ad"}, - {file = "multidict-6.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fe019fb437632b016e6cac67a7e964f1ef827ef4023f1ca0227b54be354da97e"}, - {file = "multidict-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b60cb81214a9da7cfd8ae2853d5e6e47225ece55fe5833142fe0af321c35299"}, - {file = "multidict-6.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32d9e8ef2e0312d4e96ca9adc88e0675b6d8e144349efce4a7c95d5ccb6d88e0"}, - {file = "multidict-6.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:335d584312e3fa43633d63175dfc1a5f137dd7aa03d38d1310237d54c3032774"}, - {file = "multidict-6.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:b8df917faa6b8cac3d6870fc21cb7e4d169faca68e43ffe568c156c9c6408a4d"}, - {file = "multidict-6.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:cc060b9b89b701dd8fedef5b99e1f1002b8cb95072693233a63389d37e48212d"}, - {file = "multidict-6.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f2ce3be2500658f3c644494b934628bb0c82e549dde250d2119689ce791cc8b8"}, - {file = "multidict-6.3.2-cp310-cp310-win32.whl", hash = "sha256:dbcb4490d8e74b484449abd51751b8f560dd0a4812eb5dacc6a588498222a9ab"}, - {file = "multidict-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:06944f9ced30f8602be873563ed4df7e3f40958f60b2db39732c11d615a33687"}, - {file = "multidict-6.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a034f41fcd16968c0470d8912d293d7b0d0822fc25739c5c2ff7835b85bc56"}, - {file = "multidict-6.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:352585cec45f5d83d886fc522955492bb436fca032b11d487b12d31c5a81b9e3"}, - {file = "multidict-6.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:da9d89d293511fd0a83a90559dc131f8b3292b6975eb80feff19e5f4663647e2"}, - {file = "multidict-6.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fa716592224aa652b9347a586cfe018635229074565663894eb4eb21f8307f"}, - {file = 
"multidict-6.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0326278a44c56e94792475268e5cd3d47fbc0bd41ee56928c3bbb103ba7f58fe"}, - {file = "multidict-6.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bb1ea87f7fe45e5079f6315e95d64d4ca8b43ef656d98bed63a02e3756853a22"}, - {file = "multidict-6.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cff3c5a98d037024a9065aafc621a8599fad7b423393685dc83cf7a32f8b691"}, - {file = "multidict-6.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed99834b053c655d980fb98029003cb24281e47a796052faad4543aa9e01b8e8"}, - {file = "multidict-6.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7048440e505d2b4741e5d0b32bd2f427c901f38c7760fc245918be2cf69b3b85"}, - {file = "multidict-6.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27248c27b563f5889556da8a96e18e98a56ff807ac1a7d56cf4453c2c9e4cd91"}, - {file = "multidict-6.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6323b4ba0e018bd266f776c35f3f0943fc4ee77e481593c9f93bd49888f24e94"}, - {file = "multidict-6.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:81f7ce5ec7c27d0b45c10449c8f0fed192b93251e2e98cb0b21fec779ef1dc4d"}, - {file = "multidict-6.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03bfcf2825b3bed0ba08a9d854acd18b938cab0d2dba3372b51c78e496bac811"}, - {file = "multidict-6.3.2-cp311-cp311-win32.whl", hash = "sha256:f32c2790512cae6ca886920e58cdc8c784bdc4bb2a5ec74127c71980369d18dc"}, - {file = "multidict-6.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:0b0c15e58e038a2cd75ef7cf7e072bc39b5e0488b165902efb27978984bbad70"}, - {file = "multidict-6.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d1e0ba1ce1b8cc79117196642d95f4365e118eaf5fb85f57cdbcc5a25640b2a4"}, - {file = "multidict-6.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:029bbd7d782251a78975214b78ee632672310f9233d49531fc93e8e99154af25"}, - {file = "multidict-6.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d7db41e3b56817d9175264e5fe00192fbcb8e1265307a59f53dede86161b150e"}, - {file = "multidict-6.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcab18e65cc555ac29981a581518c23311f2b1e72d8f658f9891590465383be"}, - {file = "multidict-6.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d50eff89aa4d145a5486b171a2177042d08ea5105f813027eb1050abe91839f"}, - {file = "multidict-6.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:643e57b403d3e240045a3681f9e6a04d35a33eddc501b4cbbbdbc9c70122e7bc"}, - {file = "multidict-6.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d17b37b9715b30605b5bab1460569742d0c309e5c20079263b440f5d7746e7e"}, - {file = "multidict-6.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68acd51fa94e63312b8ddf84bfc9c3d3442fe1f9988bbe1b6c703043af8867fe"}, - {file = "multidict-6.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:347eea2852ab7f697cc5ed9b1aae96b08f8529cca0c6468f747f0781b1842898"}, - {file = "multidict-6.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4d3f8e57027dcda84a1aa181501c15c45eab9566eb6fcc274cbd1e7561224f8"}, - {file = "multidict-6.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9ca57a841ffcf712e47875d026aa49d6e67f9560624d54b51628603700d5d287"}, - {file = "multidict-6.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7cafdafb44c4e646118410368307693e49d19167e5f119cbe3a88697d2d1a636"}, - {file = "multidict-6.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:430120c6ce3715a9c6075cabcee557daccbcca8ba25a9fedf05c7bf564532f2d"}, - {file = "multidict-6.3.2-cp312-cp312-win32.whl", hash = "sha256:13bec31375235a68457ab887ce1bbf4f59d5810d838ae5d7e5b416242e1f3ed4"}, - {file = 
"multidict-6.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:c3b6d7620e6e90c6d97eaf3a63bf7fbd2ba253aab89120a4a9c660bf2d675391"}, - {file = "multidict-6.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b9ca24700322816ae0d426aa33671cf68242f8cc85cee0d0e936465ddaee90b5"}, - {file = "multidict-6.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d9fbbe23667d596ff4f9f74d44b06e40ebb0ab6b262cf14a284f859a66f86457"}, - {file = "multidict-6.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9cb602c5bea0589570ad3a4a6f2649c4f13cc7a1e97b4c616e5e9ff8dc490987"}, - {file = "multidict-6.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93ca81dd4d1542e20000ed90f4cc84b7713776f620d04c2b75b8efbe61106c99"}, - {file = "multidict-6.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18b6310b5454c62242577a128c87df8897f39dd913311cf2e1298e47dfc089eb"}, - {file = "multidict-6.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a6dda57de1fc9aedfdb600a8640c99385cdab59a5716cb714b52b6005797f77"}, - {file = "multidict-6.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d8ec42d03cc6b29845552a68151f9e623c541f1708328353220af571e24a247"}, - {file = "multidict-6.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80681969cee2fa84dafeb53615d51d24246849984e3e87fbe4fe39956f2e23bf"}, - {file = "multidict-6.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:01489b0c3592bb9d238e5690e9566db7f77a5380f054b57077d2c4deeaade0eb"}, - {file = "multidict-6.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:522d9f1fd995d04dfedc0a40bca7e2591bc577d920079df50b56245a4a252c1c"}, - {file = "multidict-6.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2014e9cf0b4e9c75bbad49c1758e5a9bf967a56184fc5fcc51527425baf5abba"}, - {file = "multidict-6.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:78ced9fcbee79e446ff4bb3018ac7ba1670703de7873d9c1f6f9883db53c71bc"}, - {file = "multidict-6.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1faf01af972bd01216a107c195f5294f9f393531bc3e4faddc9b333581255d4d"}, - {file = "multidict-6.3.2-cp313-cp313-win32.whl", hash = "sha256:7a699ab13d8d8e1f885de1535b4f477fb93836c87168318244c2685da7b7f655"}, - {file = "multidict-6.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:8666bb0d883310c83be01676e302587834dfd185b52758caeab32ef0eb387bc6"}, - {file = "multidict-6.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:d82c95aabee29612b1c4f48b98be98181686eb7d6c0152301f72715705cc787b"}, - {file = "multidict-6.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f47709173ea9e87a7fd05cd7e5cf1e5d4158924ff988a9a8e0fbd853705f0e68"}, - {file = "multidict-6.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c7f9d0276ceaab41b8ae78534ff28ea33d5de85db551cbf80c44371f2b55d13"}, - {file = "multidict-6.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6eab22df44a25acab2e738f882f5ec551282ab45b2bbda5301e6d2cfb323036"}, - {file = "multidict-6.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a947cb7c657f57874021b9b70c7aac049c877fb576955a40afa8df71d01a1390"}, - {file = "multidict-6.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5faa346e8e1c371187cf345ab1e02a75889f9f510c9cbc575c31b779f7df084d"}, - {file = "multidict-6.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc6e08d977aebf1718540533b4ba5b351ccec2db093370958a653b1f7f9219cc"}, - {file = "multidict-6.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98eab7acf55275b5bf09834125fa3a80b143a9f241cdcdd3f1295ffdc3c6d097"}, - {file = "multidict-6.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:36863655630becc224375c0b99364978a0f95aebfb27fb6dd500f7fb5fb36e79"}, - {file = 
"multidict-6.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d9c0979c096c0d46a963331b0e400d3a9e560e41219df4b35f0d7a2f28f39710"}, - {file = "multidict-6.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0efc04f70f05e70e5945890767e8874da5953a196f5b07c552d305afae0f3bf6"}, - {file = "multidict-6.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:2c519b3b82c34539fae3e22e4ea965869ac6b628794b1eb487780dde37637ab7"}, - {file = "multidict-6.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:329160e301f2afd7b43725d3dda8a7ef8ee41d4ceac2083fc0d8c1cc8a4bd56b"}, - {file = "multidict-6.3.2-cp313-cp313t-win32.whl", hash = "sha256:420e5144a5f598dad8db3128f1695cd42a38a0026c2991091dab91697832f8cc"}, - {file = "multidict-6.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:875faded2861c7af2682c67088e6313fec35ede811e071c96d36b081873cea14"}, - {file = "multidict-6.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2516c5eb5732d6c4e29fa93323bfdc55186895124bc569e2404e3820934be378"}, - {file = "multidict-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:be5c8622e665cc5491c13c0fcd52915cdbae991a3514251d71129691338cdfb2"}, - {file = "multidict-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ef33150eea7953cfdb571d862cff894e0ad97ab80d97731eb4b9328fc32d52b"}, - {file = "multidict-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40b357738ce46e998f1b1bad9c4b79b2a9755915f71b87a8c01ce123a22a4f99"}, - {file = "multidict-6.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c60e059fcd3655a653ba99fec2556cd0260ec57f9cb138d3e6ffc413638a2e"}, - {file = "multidict-6.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:629e7c5e75bde83e54a22c7043ce89d68691d1f103be6d09a1c82b870df3b4b8"}, - {file = "multidict-6.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6c8fc97d893fdf1fff15a619fee8de2f31c9b289ef7594730e35074fa0cefb"}, - {file = 
"multidict-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52081d2f27e0652265d4637b03f09b82f6da5ce5e1474f07dc64674ff8bfc04c"}, - {file = "multidict-6.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:64529dc395b5fd0a7826ffa70d2d9a7f4abd8f5333d6aaaba67fdf7bedde9f21"}, - {file = "multidict-6.3.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2b7c3fad827770840f5399348c89635ed6d6e9bba363baad7d3c7f86a9cf1da3"}, - {file = "multidict-6.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:24aa42b1651c654ae9e5273e06c3b7ccffe9f7cc76fbde40c37e9ae65f170818"}, - {file = "multidict-6.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:04ceea01e9991357164b12882e120ce6b4d63a0424bb9f9cd37910aa56d30830"}, - {file = "multidict-6.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:943897a41160945416617db567d867ab34e9258adaffc56a25a4c3f99d919598"}, - {file = "multidict-6.3.2-cp39-cp39-win32.whl", hash = "sha256:76157a9a0c5380aadd3b5ff7b8deee355ff5adecc66c837b444fa633b4d409a2"}, - {file = "multidict-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:d091d123e44035cd5664554308477aff0b58db37e701e7598a67e907b98d1925"}, - {file = "multidict-6.3.2-py3-none-any.whl", hash = "sha256:71409d4579f716217f23be2f5e7afca5ca926aaeb398aa11b72d793bff637a1f"}, - {file = "multidict-6.3.2.tar.gz", hash = "sha256:c1035eea471f759fa853dd6e76aaa1e389f93b3e1403093fa0fd3ab4db490678"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = 
"sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = 
"multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + 
{file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = 
"multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = 
"multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = 
"multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = 
"multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, ] [package.dependencies] typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} +[[package]] +name = "murmurhash" +version = "1.0.12" +description = "Cython bindings for MurmurHash" +optional = false +python-versions = ">=3.6" +files = [ + {file = "murmurhash-1.0.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3f492bbf6f879b6eaf9da4be7471f4b68a3e3ae525aac0f35c2ae27ec91265c"}, + {file = "murmurhash-1.0.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3493e0c10a64fa72026af2ea2271d8b3511a438de3c6a771b7a57771611b9c08"}, + {file = "murmurhash-1.0.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95989ddbb187b9934e5b0e7f450793a445814b6c293a7bf92df56913c3a87c1e"}, + {file = "murmurhash-1.0.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efef9f9aad98ec915a830f0c53d14ce6807ccc6e14fd2966565ef0b71cfa086"}, + {file = "murmurhash-1.0.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b3147d171a5e5d2953b5eead21d15ea59b424844b4504a692c4b9629191148ed"}, + {file = "murmurhash-1.0.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:736c869bef5023540dde52a9338085ac823eda3f09591ba1b4ed2c09c8b378db"}, + {file = "murmurhash-1.0.12-cp310-cp310-win_amd64.whl", hash = "sha256:b81feb5bfd13bce638ccf910c685b04ad0537635918d04c83b291ce0441776da"}, + {file = "murmurhash-1.0.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8b236b76a256690e745b63b679892878ec4f01deeeda8d311482a9b183d2d452"}, + {file = "murmurhash-1.0.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8bc3756dd657ed90c1354705e66513c11516929fe726e7bc91c79734d190f394"}, + {file = "murmurhash-1.0.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd41e4c3d7936b69010d76e5edff363bf40fd918d86287a14e924363d7828522"}, + {file = 
"murmurhash-1.0.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36be2831df750163495e471d24aeef6aca1b2a3c4dfb05f40114859db47ff3f2"}, + {file = "murmurhash-1.0.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b078c10f9c82cbd144b1200061fbfa7f99af9d5d8d7f7d8a324370169e3da7c2"}, + {file = "murmurhash-1.0.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:307ca8da5f038635ded9de722fe11f07f06a2b76442ae272dcccbff6086de487"}, + {file = "murmurhash-1.0.12-cp311-cp311-win_amd64.whl", hash = "sha256:1b4ab5ba5ba909959659989f3bf57903f31f49906fe40f00aec81e32eea69a88"}, + {file = "murmurhash-1.0.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1a4c97c8ffbedb62b760c3c2f77b5b8cb0e0ac0ec83a74d2f289e113e3e92ed5"}, + {file = "murmurhash-1.0.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9574f0b634f059158bb89734a811e435ac9ad2335c02a7abb59f1875dcce244c"}, + {file = "murmurhash-1.0.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:701cc0ce91809b4d7c2e0518be759635205e1e181325792044f5a8118019f716"}, + {file = "murmurhash-1.0.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1c9de2167a9d408d121ebc918bcb20b2718ec956f3aae0ded53d9bb224bb8e"}, + {file = "murmurhash-1.0.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94a52972835bdae8af18147c67c398ff3ea1d875f5b8dca1e1aa0fadb892f546"}, + {file = "murmurhash-1.0.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cc88004c8615dcabe31d21142689f719fdf549ba782850bef389cf227a1df575"}, + {file = "murmurhash-1.0.12-cp312-cp312-win_amd64.whl", hash = "sha256:8c5b8804c07a76f779e67f83aad37bc2189a0e65ebdd3f2b305242d489d31e03"}, + {file = "murmurhash-1.0.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:63f10c6d6ef9ee85073dd896d2c4e0ab161bc6b8e7e9201c69f8061f9f1b6468"}, + {file = "murmurhash-1.0.12-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:66356f6308fd2a44a8ab056f020acd5bc22302f23ef5cce3705f2493e0fe9c3c"}, + {file = "murmurhash-1.0.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdb2104aa3471324724abf5a3a76fc94bcbeaf023bb6a6dd94da567b8633d8a6"}, + {file = "murmurhash-1.0.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a7ef5fb37e72536458ac4a6f486fb374c60ac4c4862d9195d3d4b58239a91de"}, + {file = "murmurhash-1.0.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8bd5524de195991ce3551b14286ec0b730cc9dd2e10565dad2ae470eec082028"}, + {file = "murmurhash-1.0.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:19de30edaaa2217cd0c41b6cf6bbfa418be5d7fdf267ca92e5e3710d4daac593"}, + {file = "murmurhash-1.0.12-cp313-cp313-win_amd64.whl", hash = "sha256:7dc4ebdfed7ef8ed70519962ac9b704e91978ee14e049f1ff37bca2f579ce84d"}, + {file = "murmurhash-1.0.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c9bb5652a3444d5a5bf5d164e6b5e6c8f5715d031627ff79d58caac0e510e8d8"}, + {file = "murmurhash-1.0.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef56fdee81e2b4191c5b7416b5428cb920260a91f028a82a1680b14137eaf32c"}, + {file = "murmurhash-1.0.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91042b85d3214ebaba505d7349f0bcd745b07e7163459909d622ea10a04c2dea"}, + {file = "murmurhash-1.0.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7de1552326f4f8c0b63d26f823fa66a4dcf9c01164e252374d84bcf86a6af2fe"}, + {file = "murmurhash-1.0.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:16de7dee9e082159b7ad4cffd62b0c03bbc385b84dcff448ce27bb14c505d12d"}, + {file = "murmurhash-1.0.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8b5de26a7235d8794403353423cd65720d8496363ab75248120107559b12a8c6"}, + {file = "murmurhash-1.0.12-cp39-cp39-win_amd64.whl", hash = 
"sha256:d1ad46f78de3ce3f3a8e8c2f87af32bcede893f047c87389c7325bb1f3f46b47"}, + {file = "murmurhash-1.0.12.tar.gz", hash = "sha256:467b7ee31c1f79f46d00436a1957fc52a0e5801369dd2f30eb7655f380735b5f"}, +] + [[package]] name = "nbclient" version = "0.10.2" @@ -3770,26 +4092,26 @@ files = [ [[package]] name = "notebook" -version = "7.0.7" +version = "7.4.0" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.0.7-py3-none-any.whl", hash = "sha256:289b606d7e173f75a18beb1406ef411b43f97f7a9c55ba03efa3622905a62346"}, - {file = "notebook-7.0.7.tar.gz", hash = "sha256:3bcff00c17b3ac142ef5f436d50637d936b274cfa0b41f6ac0175363de9b4e09"}, + {file = "notebook-7.4.0-py3-none-any.whl", hash = "sha256:005fd21f4db6093a7b739b17df5fe60597811adb07e8255f458db4035d208e3a"}, + {file = "notebook-7.4.0.tar.gz", hash = "sha256:581d88f83709d90ce738dfd1d759892b96e3cbbc9c4a989912ed6c6a08f0d3e8"}, ] [package.dependencies] jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.0.2,<5" -jupyterlab-server = ">=2.22.1,<3" +jupyterlab = ">=4.4.0rc0,<4.5" +jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2,<0.3" tornado = ">=6.2.0" [package.extras] dev = ["hatch", "pre-commit"] docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] [[package]] name = "notebook-shim" @@ -3810,37 +4132,37 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", 
"pytest-tornasync" [[package]] name = "numba" -version = "0.61.0" +version = "0.61.2" description = "compiling Python code using LLVM" optional = false python-versions = ">=3.10" files = [ - {file = "numba-0.61.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9cab9783a700fa428b1a54d65295122bc03b3de1d01fb819a6b9dbbddfdb8c43"}, - {file = "numba-0.61.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46c5ae094fb3706f5adf9021bfb7fc11e44818d61afee695cdee4eadfed45e98"}, - {file = "numba-0.61.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6fb74e81aa78a2303e30593d8331327dfc0d2522b5db05ac967556a26db3ef87"}, - {file = "numba-0.61.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:0ebbd4827091384ab8c4615ba1b3ca8bc639a3a000157d9c37ba85d34cd0da1b"}, - {file = "numba-0.61.0-cp310-cp310-win_amd64.whl", hash = "sha256:43aa4d7d10c542d3c78106b8481e0cbaaec788c39ee8e3d7901682748ffdf0b4"}, - {file = "numba-0.61.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:bf64c2d0f3d161af603de3825172fb83c2600bcb1d53ae8ea568d4c53ba6ac08"}, - {file = "numba-0.61.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de5aa7904741425f28e1028b85850b31f0a245e9eb4f7c38507fb893283a066c"}, - {file = "numba-0.61.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:21c2fe25019267a608e2710a6a947f557486b4b0478b02e45a81cf606a05a7d4"}, - {file = "numba-0.61.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:74250b26ed6a1428763e774dc5b2d4e70d93f73795635b5412b8346a4d054574"}, - {file = "numba-0.61.0-cp311-cp311-win_amd64.whl", hash = "sha256:b72bbc8708e98b3741ad0c63f9929c47b623cc4ee86e17030a4f3e301e8401ac"}, - {file = "numba-0.61.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:152146ecdbb8d8176f294e9f755411e6f270103a11c3ff50cecc413f794e52c8"}, - {file = "numba-0.61.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5cafa6095716fcb081618c28a8d27bf7c001e09696f595b41836dec114be2905"}, - {file = 
"numba-0.61.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ffe9fe373ed30638d6e20a0269f817b2c75d447141f55a675bfcf2d1fe2e87fb"}, - {file = "numba-0.61.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9f25f7fef0206d55c1cfb796ad833cbbc044e2884751e56e798351280038484c"}, - {file = "numba-0.61.0-cp312-cp312-win_amd64.whl", hash = "sha256:550d389573bc3b895e1ccb18289feea11d937011de4d278b09dc7ed585d1cdcb"}, - {file = "numba-0.61.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:b96fafbdcf6f69b69855273e988696aae4974115a815f6818fef4af7afa1f6b8"}, - {file = "numba-0.61.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f6c452dca1de8e60e593f7066df052dd8da09b243566ecd26d2b796e5d3087d"}, - {file = "numba-0.61.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44240e694d4aa321430c97b21453e46014fe6c7b8b7d932afa7f6a88cc5d7e5e"}, - {file = "numba-0.61.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:764f0e47004f126f58c3b28e0a02374c420a9d15157b90806d68590f5c20cc89"}, - {file = "numba-0.61.0-cp313-cp313-win_amd64.whl", hash = "sha256:074cd38c5b1f9c65a4319d1f3928165f48975ef0537ad43385b2bd908e6e2e35"}, - {file = "numba-0.61.0.tar.gz", hash = "sha256:888d2e89b8160899e19591467e8fdd4970e07606e1fbc248f239c89818d5f925"}, + {file = "numba-0.61.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:cf9f9fc00d6eca0c23fc840817ce9f439b9f03c8f03d6246c0e7f0cb15b7162a"}, + {file = "numba-0.61.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ea0247617edcb5dd61f6106a56255baab031acc4257bddaeddb3a1003b4ca3fd"}, + {file = "numba-0.61.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ae8c7a522c26215d5f62ebec436e3d341f7f590079245a2f1008dfd498cc1642"}, + {file = "numba-0.61.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bd1e74609855aa43661edffca37346e4e8462f6903889917e9f41db40907daa2"}, + {file = "numba-0.61.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:ae45830b129c6137294093b269ef0a22998ccc27bf7cf096ab8dcf7bca8946f9"}, + {file = "numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2"}, + {file = "numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b"}, + {file = "numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60"}, + {file = "numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18"}, + {file = "numba-0.61.2-cp311-cp311-win_amd64.whl", hash = "sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1"}, + {file = "numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2"}, + {file = "numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8"}, + {file = "numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546"}, + {file = "numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd"}, + {file = "numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18"}, + {file = "numba-0.61.2-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:3a10a8fc9afac40b1eac55717cece1b8b1ac0b946f5065c89e00bde646b5b154"}, + {file = "numba-0.61.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d3bcada3c9afba3bed413fba45845f2fb9cd0d2b27dd58a1be90257e293d140"}, + {file = "numba-0.61.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:bdbca73ad81fa196bd53dc12e3aaf1564ae036e0c125f237c7644fe64a4928ab"}, + {file = "numba-0.61.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5f154aaea625fb32cfbe3b80c5456d514d416fcdf79733dd69c0df3a11348e9e"}, + {file = "numba-0.61.2-cp313-cp313-win_amd64.whl", hash = "sha256:59321215e2e0ac5fa928a8020ab00b8e57cda8a97384963ac0dfa4d4e6aa54e7"}, + {file = "numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d"}, ] [package.dependencies] llvmlite = "==0.44.*" -numpy = ">=1.24,<2.2" +numpy = ">=1.24,<2.3" [[package]] name = "numpy" @@ -3954,13 +4276,13 @@ sympy = "*" [[package]] name = "openai" -version = "1.71.0" +version = "1.72.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" files = [ - {file = "openai-1.71.0-py3-none-any.whl", hash = "sha256:e1c643738f1fff1af52bce6ef06a7716c95d089281e7011777179614f32937aa"}, - {file = "openai-1.71.0.tar.gz", hash = "sha256:52b20bb990a1780f9b0b8ccebac93416343ebd3e4e714e3eff730336833ca207"}, + {file = "openai-1.72.0-py3-none-any.whl", hash = "sha256:34f5496ba5c8cb06c592831d69e847e2d164526a2fb92afdc3b5cf2891c328c3"}, + {file = "openai-1.72.0.tar.gz", hash = "sha256:f51de971448905cc90ed5175a5b19e92fd94e31f68cde4025762f9f5257150db"}, ] [package.dependencies] @@ -3994,13 +4316,13 @@ et-xmlfile = "*" [[package]] name = "opentelemetry-api" -version = "1.31.1" +version = "1.32.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_api-1.31.1-py3-none-any.whl", hash = "sha256:1511a3f470c9c8a32eeea68d4ea37835880c0eed09dd1a0187acc8b1301da0a1"}, - {file = "opentelemetry_api-1.31.1.tar.gz", hash = "sha256:137ad4b64215f02b3000a0292e077641c8611aab636414632a9b9068593b7e91"}, + {file = "opentelemetry_api-1.32.0-py3-none-any.whl", hash = "sha256:15df743c765078611f376037b0d9111ec5c1febf2ec9440cdd919370faa1ce55"}, + {file = "opentelemetry_api-1.32.0.tar.gz", hash = 
"sha256:2623280c916f9b19cad0aa4280cb171265f19fd2909b0d47e4f06f7c83b02cb5"}, ] [package.dependencies] @@ -4009,119 +4331,119 @@ importlib-metadata = ">=6.0,<8.7.0" [[package]] name = "opentelemetry-instrumentation" -version = "0.52b1" +version = "0.53b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation-0.52b1-py3-none-any.whl", hash = "sha256:8c0059c4379d77bbd8015c8d8476020efe873c123047ec069bb335e4b8717477"}, - {file = "opentelemetry_instrumentation-0.52b1.tar.gz", hash = "sha256:739f3bfadbbeec04dd59297479e15660a53df93c131d907bb61052e3d3c1406f"}, + {file = "opentelemetry_instrumentation-0.53b0-py3-none-any.whl", hash = "sha256:70600778fd567c9c5fbfca181378ae179c0dec3ff613171707d3d77c360ff105"}, + {file = "opentelemetry_instrumentation-0.53b0.tar.gz", hash = "sha256:f2c21d71a3cdf28c656e3d90d247ee7558fb9b0239b3d9e9190266499dbed9d2"}, ] [package.dependencies] opentelemetry-api = ">=1.4,<2.0" -opentelemetry-semantic-conventions = "0.52b1" +opentelemetry-semantic-conventions = "0.53b0" packaging = ">=18.0" wrapt = ">=1.0.0,<2.0.0" [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.52b1" +version = "0.53b0" description = "ASGI instrumentation for OpenTelemetry" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_asgi-0.52b1-py3-none-any.whl", hash = "sha256:f7179f477ed665ba21871972f979f21e8534edb971232e11920c8a22f4759236"}, - {file = "opentelemetry_instrumentation_asgi-0.52b1.tar.gz", hash = "sha256:a6dbce9cb5b2c2f45ce4817ad21f44c67fd328358ad3ab911eb46f0be67f82ec"}, + {file = "opentelemetry_instrumentation_asgi-0.53b0-py3-none-any.whl", hash = "sha256:a2e242e0633541150bf8e42ed983f8aeec94acb397bc67a3dbdb47933bfdc7f8"}, + {file = "opentelemetry_instrumentation_asgi-0.53b0.tar.gz", hash = "sha256:b82d7cecdd6a4239ee87e1c629bfd7dae208142ddbb24528d9a9274eb2bc4e44"}, ] [package.dependencies] 
asgiref = ">=3.0,<4.0" opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" -opentelemetry-util-http = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-semantic-conventions = "0.53b0" +opentelemetry-util-http = "0.53b0" [package.extras] instruments = ["asgiref (>=3.0,<4.0)"] [[package]] name = "opentelemetry-instrumentation-dbapi" -version = "0.52b1" +version = "0.53b0" description = "OpenTelemetry Database API instrumentation" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_dbapi-0.52b1-py3-none-any.whl", hash = "sha256:47e54d26ad39f3951c7f3b4d4fb685a3c75445cfd57fcff2e92c416575c568ab"}, - {file = "opentelemetry_instrumentation_dbapi-0.52b1.tar.gz", hash = "sha256:62a6c37b659f6aa5476f12fb76c78f4ad27c49fb71a8a2c11609afcbb84f1e1c"}, + {file = "opentelemetry_instrumentation_dbapi-0.53b0-py3-none-any.whl", hash = "sha256:0a3134a6ba366b0b8455412f86d0e689657db5f2b7296dc4d76810aa33257943"}, + {file = "opentelemetry_instrumentation_dbapi-0.53b0.tar.gz", hash = "sha256:64baa1499094789e8af65384f9d100238417c05d419d940fdcce2db30353f882"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-semantic-conventions = "0.53b0" wrapt = ">=1.0.0,<2.0.0" [[package]] name = "opentelemetry-instrumentation-django" -version = "0.52b1" +version = "0.53b0" description = "OpenTelemetry Instrumentation for Django" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_django-0.52b1-py3-none-any.whl", hash = "sha256:895dcc551fa9c38c62e23d6b66ef250b20ff0afd7a39f8822ec61a2929dfc7c7"}, - {file = "opentelemetry_instrumentation_django-0.52b1.tar.gz", hash = "sha256:2541819564dae5edb0afd023de25d35761d8943aa88e6344b1e52f4fe036ccb6"}, + {file = 
"opentelemetry_instrumentation_django-0.53b0-py3-none-any.whl", hash = "sha256:0ca1a43547f6fc2041b2ef530611a5c0f0becd204b68dd4a630a35cf2ba2fb0d"}, + {file = "opentelemetry_instrumentation_django-0.53b0.tar.gz", hash = "sha256:3870456aa463c909f072ac0ff8d49a2a5dd8fb40ac09ae5d4d6b08cd2acc4d41"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-instrumentation-wsgi = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" -opentelemetry-util-http = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-instrumentation-wsgi = "0.53b0" +opentelemetry-semantic-conventions = "0.53b0" +opentelemetry-util-http = "0.53b0" [package.extras] -asgi = ["opentelemetry-instrumentation-asgi (==0.52b1)"] +asgi = ["opentelemetry-instrumentation-asgi (==0.53b0)"] instruments = ["django (>=1.10)"] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.52b1" +version = "0.53b0" description = "OpenTelemetry FastAPI Instrumentation" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_fastapi-0.52b1-py3-none-any.whl", hash = "sha256:73c8804f053c5eb2fd2c948218bff9561f1ef65e89db326a6ab0b5bf829969f4"}, - {file = "opentelemetry_instrumentation_fastapi-0.52b1.tar.gz", hash = "sha256:d26ab15dc49e041301d5c2571605b8f5c3a6ee4a85b60940338f56c120221e98"}, + {file = "opentelemetry_instrumentation_fastapi-0.53b0-py3-none-any.whl", hash = "sha256:c29b7b3f5ca5aeb89436a605ac481467630bc761a241cc4258058ba00e6d40ed"}, + {file = "opentelemetry_instrumentation_fastapi-0.53b0.tar.gz", hash = "sha256:a901ded31595d6e64d35c92379c08d8314baffc8715653ac42349b6140c725ce"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-instrumentation-asgi = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" -opentelemetry-util-http = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-instrumentation-asgi = 
"0.53b0" +opentelemetry-semantic-conventions = "0.53b0" +opentelemetry-util-http = "0.53b0" [package.extras] instruments = ["fastapi (>=0.58,<1.0)"] [[package]] name = "opentelemetry-instrumentation-flask" -version = "0.52b1" +version = "0.53b0" description = "Flask instrumentation for OpenTelemetry" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_flask-0.52b1-py3-none-any.whl", hash = "sha256:3c8b83147838bef24aac0182f0d49865321efba4cb1f96629f460330d21d0fa9"}, - {file = "opentelemetry_instrumentation_flask-0.52b1.tar.gz", hash = "sha256:c8bc64da425ccbadb4a2ee5e8d99045e2282bfbf63bc9be07c386675839d00be"}, + {file = "opentelemetry_instrumentation_flask-0.53b0-py3-none-any.whl", hash = "sha256:2b6c8f7236eccac416b8b90a2c19e8b1cb03f501fd35aefd2ea8d98d40f04f11"}, + {file = "opentelemetry_instrumentation_flask-0.53b0.tar.gz", hash = "sha256:cf14683729e3ec19051677affbf858ace1c5bbf51820defe677bf4508b6cd61b"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-instrumentation-wsgi = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" -opentelemetry-util-http = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-instrumentation-wsgi = "0.53b0" +opentelemetry-semantic-conventions = "0.53b0" +opentelemetry-util-http = "0.53b0" packaging = ">=21.0" [package.extras] @@ -4129,76 +4451,76 @@ instruments = ["flask (>=1.0)"] [[package]] name = "opentelemetry-instrumentation-psycopg2" -version = "0.52b1" +version = "0.53b0" description = "OpenTelemetry psycopg2 instrumentation" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_psycopg2-0.52b1-py3-none-any.whl", hash = "sha256:51ac9f3d0b83889a1df2fc1342d86887142c2b70d8532043bc49b36fe95ea9d8"}, - {file = "opentelemetry_instrumentation_psycopg2-0.52b1.tar.gz", hash = "sha256:5bbdb2a2973aae9402946c995e277b1f76e467faebc40ac0f8da51c701918bb4"}, + {file = 
"opentelemetry_instrumentation_psycopg2-0.53b0-py3-none-any.whl", hash = "sha256:b56af048a54e58ed8b39765aabd418b19ddaa4846d049d0880f11bc27297f5ae"}, + {file = "opentelemetry_instrumentation_psycopg2-0.53b0.tar.gz", hash = "sha256:a269778a600c988d6da9e28d38ac762507632d6ba8542b0987ca195a3acc560a"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-instrumentation-dbapi = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-instrumentation-dbapi = "0.53b0" [package.extras] instruments = ["psycopg2 (>=2.7.3.1)", "psycopg2-binary (>=2.7.3.1)"] [[package]] name = "opentelemetry-instrumentation-requests" -version = "0.52b1" +version = "0.53b0" description = "OpenTelemetry requests instrumentation" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_requests-0.52b1-py3-none-any.whl", hash = "sha256:58ae3c415543d8ba2b0091b81ac13b65f2993adef0a4b9a5d3d7ebbe0023986a"}, - {file = "opentelemetry_instrumentation_requests-0.52b1.tar.gz", hash = "sha256:711a2ef90e32a0ffd4650b21376b8e102473845ba9121efca0d94314d529b501"}, + {file = "opentelemetry_instrumentation_requests-0.53b0-py3-none-any.whl", hash = "sha256:d3fe68fee86e281223d5590f1c37f69b86db7dacd6d69e4a879a32c2281cc2c7"}, + {file = "opentelemetry_instrumentation_requests-0.53b0.tar.gz", hash = "sha256:e6e1d2e9d2e98ce6993f0f4224e5f5cd42cb8843cf594aaa6ff436682c0a200a"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" -opentelemetry-util-http = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-semantic-conventions = "0.53b0" +opentelemetry-util-http = "0.53b0" [package.extras] instruments = ["requests (>=2.0,<3.0)"] [[package]] name = "opentelemetry-instrumentation-urllib" -version = "0.52b1" +version = "0.53b0" description = "OpenTelemetry urllib instrumentation" optional = false python-versions = 
">=3.8" files = [ - {file = "opentelemetry_instrumentation_urllib-0.52b1-py3-none-any.whl", hash = "sha256:559ee1228194cf025c22b2515bdb855aefd9cec19596a7b30df5f092fbc72e56"}, - {file = "opentelemetry_instrumentation_urllib-0.52b1.tar.gz", hash = "sha256:1364c742eaec56e11bab8723aecde378e438f86f753d93fcbf5ca8f6e1073a5c"}, + {file = "opentelemetry_instrumentation_urllib-0.53b0-py3-none-any.whl", hash = "sha256:6c650f13b37f1ce9a3b743d184491b54cf099dedd95d3ac259b6404fb06b686b"}, + {file = "opentelemetry_instrumentation_urllib-0.53b0.tar.gz", hash = "sha256:1cbbc161a5e2a6a268edce777eb766aae7de79f74b177669eb4f4a20e4cf2f7c"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" -opentelemetry-util-http = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-semantic-conventions = "0.53b0" +opentelemetry-util-http = "0.53b0" [[package]] name = "opentelemetry-instrumentation-urllib3" -version = "0.52b1" +version = "0.53b0" description = "OpenTelemetry urllib3 instrumentation" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_urllib3-0.52b1-py3-none-any.whl", hash = "sha256:4011bac1639a6336c443252d93709eff17e316523f335ddee4ddb47bf464305e"}, - {file = "opentelemetry_instrumentation_urllib3-0.52b1.tar.gz", hash = "sha256:b607aefd2c02ff7fbf6eea4b863f63348e64b29592ffa90dcc970a5bbcbe3c6b"}, + {file = "opentelemetry_instrumentation_urllib3-0.53b0-py3-none-any.whl", hash = "sha256:ab743da7e564068fc7aaaf686c7cb219fe54ff2bcf8aa2cdb7cde14000ed9679"}, + {file = "opentelemetry_instrumentation_urllib3-0.53b0.tar.gz", hash = "sha256:6741157ade407d971c4ffabda843461c0ad1d3d3b87eecdf8c4f64d46ccfa395"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" -opentelemetry-util-http = "0.52b1" +opentelemetry-instrumentation = "0.53b0" 
+opentelemetry-semantic-conventions = "0.53b0" +opentelemetry-util-http = "0.53b0" wrapt = ">=1.0.0,<2.0.0" [package.extras] @@ -4206,20 +4528,20 @@ instruments = ["urllib3 (>=1.0.0,<3.0.0)"] [[package]] name = "opentelemetry-instrumentation-wsgi" -version = "0.52b1" +version = "0.53b0" description = "WSGI Middleware for OpenTelemetry" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_instrumentation_wsgi-0.52b1-py3-none-any.whl", hash = "sha256:13d19958bb63df0dc32df23a047e94fe5db66151d29b17c01b1d751dd84029f8"}, - {file = "opentelemetry_instrumentation_wsgi-0.52b1.tar.gz", hash = "sha256:2c0534cacae594ef8c749edf3d1a8bce78e959a1b40efbc36f1b59d1f7977089"}, + {file = "opentelemetry_instrumentation_wsgi-0.53b0-py3-none-any.whl", hash = "sha256:b6311875a70d709b732d2a52401b4b7076d6788ecd399115a5932c8261ea4a25"}, + {file = "opentelemetry_instrumentation_wsgi-0.53b0.tar.gz", hash = "sha256:4a99b450045ea5487499e1219f5ef52c993edb520c234f688f6cc427ef289368"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.52b1" -opentelemetry-semantic-conventions = "0.52b1" -opentelemetry-util-http = "0.52b1" +opentelemetry-instrumentation = "0.53b0" +opentelemetry-semantic-conventions = "0.53b0" +opentelemetry-util-http = "0.53b0" [[package]] name = "opentelemetry-resource-detector-azure" @@ -4237,44 +4559,44 @@ opentelemetry-sdk = ">=1.21,<2.0" [[package]] name = "opentelemetry-sdk" -version = "1.31.1" +version = "1.32.0" description = "OpenTelemetry Python SDK" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_sdk-1.31.1-py3-none-any.whl", hash = "sha256:882d021321f223e37afaca7b4e06c1d8bbc013f9e17ff48a7aa017460a8e7dae"}, - {file = "opentelemetry_sdk-1.31.1.tar.gz", hash = "sha256:c95f61e74b60769f8ff01ec6ffd3d29684743404603df34b20aa16a49dc8d903"}, + {file = "opentelemetry_sdk-1.32.0-py3-none-any.whl", hash = "sha256:ed252d035c22a15536c1f603ca089298daab60850fc2f5ddfa95d95cc1c043ea"}, + 
{file = "opentelemetry_sdk-1.32.0.tar.gz", hash = "sha256:5ff07fb371d1ab1189fa7047702e2e888b5403c5efcbb18083cae0d5aa5f58d2"}, ] [package.dependencies] -opentelemetry-api = "1.31.1" -opentelemetry-semantic-conventions = "0.52b1" +opentelemetry-api = "1.32.0" +opentelemetry-semantic-conventions = "0.53b0" typing-extensions = ">=3.7.4" [[package]] name = "opentelemetry-semantic-conventions" -version = "0.52b1" +version = "0.53b0" description = "OpenTelemetry Semantic Conventions" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_semantic_conventions-0.52b1-py3-none-any.whl", hash = "sha256:72b42db327e29ca8bb1b91e8082514ddf3bbf33f32ec088feb09526ade4bc77e"}, - {file = "opentelemetry_semantic_conventions-0.52b1.tar.gz", hash = "sha256:7b3d226ecf7523c27499758a58b542b48a0ac8d12be03c0488ff8ec60c5bae5d"}, + {file = "opentelemetry_semantic_conventions-0.53b0-py3-none-any.whl", hash = "sha256:561da89f766ab51615c0e72b12329e0a1bc16945dbd62c8646ffc74e36a1edff"}, + {file = "opentelemetry_semantic_conventions-0.53b0.tar.gz", hash = "sha256:05b7908e1da62d72f9bf717ed25c72f566fe005a2dd260c61b11e025f2552cf6"}, ] [package.dependencies] deprecated = ">=1.2.6" -opentelemetry-api = "1.31.1" +opentelemetry-api = "1.32.0" [[package]] name = "opentelemetry-util-http" -version = "0.52b1" +version = "0.53b0" description = "Web util for OpenTelemetry" optional = false python-versions = ">=3.8" files = [ - {file = "opentelemetry_util_http-0.52b1-py3-none-any.whl", hash = "sha256:6a6ab6bfa23fef96f4995233e874f67602adf9d224895981b4ab9d4dde23de78"}, - {file = "opentelemetry_util_http-0.52b1.tar.gz", hash = "sha256:c03c8c23f1b75fadf548faece7ead3aecd50761c5593a2b2831b48730eee5b31"}, + {file = "opentelemetry_util_http-0.53b0-py3-none-any.whl", hash = "sha256:eca40d8cd1c1149081142c44756c0a2da0be306931339b839e1b436a9de101a4"}, + {file = "opentelemetry_util_http-0.53b0.tar.gz", hash = "sha256:521111872be0cdfd4346e15e9d4822aeeb8501b094c721ef49c26277b286084e"}, ] [[package]] @@ 
-4659,6 +4981,52 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "preshed" +version = "3.0.9" +description = "Cython hash table that trusts the keys are pre-hashed" +optional = false +python-versions = ">=3.6" +files = [ + {file = "preshed-3.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f96ef4caf9847b2bb9868574dcbe2496f974e41c2b83d6621c24fb4c3fc57e3"}, + {file = "preshed-3.0.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a61302cf8bd30568631adcdaf9e6b21d40491bd89ba8ebf67324f98b6c2a2c05"}, + {file = "preshed-3.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99499e8a58f58949d3f591295a97bca4e197066049c96f5d34944dd21a497193"}, + {file = "preshed-3.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea6b6566997dc3acd8c6ee11a89539ac85c77275b4dcefb2dc746d11053a5af8"}, + {file = "preshed-3.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:bfd523085a84b1338ff18f61538e1cfcdedc4b9e76002589a301c364d19a2e36"}, + {file = "preshed-3.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7c2364da27f2875524ce1ca754dc071515a9ad26eb5def4c7e69129a13c9a59"}, + {file = "preshed-3.0.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182138033c0730c683a6d97e567ceb8a3e83f3bff5704f300d582238dbd384b3"}, + {file = "preshed-3.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:345a10be3b86bcc6c0591d343a6dc2bfd86aa6838c30ced4256dfcfa836c3a64"}, + {file = "preshed-3.0.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51d0192274aa061699b284f9fd08416065348edbafd64840c3889617ee1609de"}, + {file = "preshed-3.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:96b857d7a62cbccc3845ac8c41fd23addf052821be4eb987f2eb0da3d8745aa1"}, + {file = "preshed-3.0.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:b4fe6720012c62e6d550d6a5c1c7ad88cacef8388d186dad4bafea4140d9d198"}, + {file = "preshed-3.0.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e04f05758875be9751e483bd3c519c22b00d3b07f5a64441ec328bb9e3c03700"}, + {file = "preshed-3.0.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a55091d0e395f1fdb62ab43401bb9f8b46c7d7794d5b071813c29dc1ab22fd0"}, + {file = "preshed-3.0.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7de8f5138bcac7870424e09684dc3dd33c8e30e81b269f6c9ede3d8c7bb8e257"}, + {file = "preshed-3.0.9-cp312-cp312-win_amd64.whl", hash = "sha256:24229c77364628743bc29c5620c5d6607ed104f0e02ae31f8a030f99a78a5ceb"}, + {file = "preshed-3.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73b0f7ecc58095ebbc6ca26ec806008ef780190fe685ce471b550e7eef58dc2"}, + {file = "preshed-3.0.9-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cb90ecd5bec71c21d95962db1a7922364d6db2abe284a8c4b196df8bbcc871e"}, + {file = "preshed-3.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:e304a0a8c9d625b70ba850c59d4e67082a6be9c16c4517b97850a17a282ebee6"}, + {file = "preshed-3.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1fa6d3d5529b08296ff9b7b4da1485c080311fd8744bbf3a86019ff88007b382"}, + {file = "preshed-3.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1e5173809d85edd420fc79563b286b88b4049746b797845ba672cf9435c0e7"}, + {file = "preshed-3.0.9-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fe81eb21c7d99e8b9a802cc313b998c5f791bda592903c732b607f78a6b7dc4"}, + {file = "preshed-3.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:78590a4a952747c3766e605ce8b747741005bdb1a5aa691a18aae67b09ece0e6"}, + {file = "preshed-3.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:3452b64d97ce630e200c415073040aa494ceec6b7038f7a2a3400cbd7858e952"}, + {file = "preshed-3.0.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ac970d97b905e9e817ec13d31befd5b07c9cfec046de73b551d11a6375834b79"}, + {file = "preshed-3.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eebaa96ece6641cd981491cba995b68c249e0b6877c84af74971eacf8990aa19"}, + {file = "preshed-3.0.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d473c5f6856e07a88d41fe00bb6c206ecf7b34c381d30de0b818ba2ebaf9406"}, + {file = "preshed-3.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:0de63a560f10107a3f0a9e252cc3183b8fdedcb5f81a86938fd9f1dcf8a64adf"}, + {file = "preshed-3.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3a9ad9f738084e048a7c94c90f40f727217387115b2c9a95c77f0ce943879fcd"}, + {file = "preshed-3.0.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a671dfa30b67baa09391faf90408b69c8a9a7f81cb9d83d16c39a182355fbfce"}, + {file = "preshed-3.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23906d114fc97c17c5f8433342495d7562e96ecfd871289c2bb2ed9a9df57c3f"}, + {file = "preshed-3.0.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:778cf71f82cedd2719b256f3980d556d6fb56ec552334ba79b49d16e26e854a0"}, + {file = "preshed-3.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:a6e579439b329eb93f32219ff27cb358b55fbb52a4862c31a915a098c8a22ac2"}, + {file = "preshed-3.0.9.tar.gz", hash = "sha256:721863c5244ffcd2651ad0928951a2c7c77b102f4e11a251ad85d37ee7621660"}, +] + +[package.dependencies] +cymem = ">=2.0.2,<2.1.0" +murmurhash = ">=0.28.0,<1.1.0" + [[package]] name = "prometheus-client" version = "0.21.1" @@ -4981,13 +5349,13 @@ files = [ [[package]] name = "pydantic" -version = "2.11.2" +version = "2.11.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" files = [ - 
{file = "pydantic-2.11.2-py3-none-any.whl", hash = "sha256:7f17d25846bcdf89b670a86cdfe7b29a9f1c9ca23dee154221c9aa81845cfca7"}, - {file = "pydantic-2.11.2.tar.gz", hash = "sha256:2138628e050bd7a1e70b91d4bf4a91167f4ad76fdb83209b107c8d84b854917e"}, + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, ] [package.dependencies] @@ -5967,29 +6335,29 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.11.4" +version = "0.11.5" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.11.4-py3-none-linux_armv6l.whl", hash = "sha256:d9f4a761ecbde448a2d3e12fb398647c7f0bf526dbc354a643ec505965824ed2"}, - {file = "ruff-0.11.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8c1747d903447d45ca3d40c794d1a56458c51e5cc1bc77b7b64bd2cf0b1626cc"}, - {file = "ruff-0.11.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:51a6494209cacca79e121e9b244dc30d3414dac8cc5afb93f852173a2ecfc906"}, - {file = "ruff-0.11.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f171605f65f4fc49c87f41b456e882cd0c89e4ac9d58e149a2b07930e1d466f"}, - {file = "ruff-0.11.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebf99ea9af918878e6ce42098981fc8c1db3850fef2f1ada69fb1dcdb0f8e79e"}, - {file = "ruff-0.11.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edad2eac42279df12e176564a23fc6f4aaeeb09abba840627780b1bb11a9d223"}, - {file = "ruff-0.11.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f103a848be9ff379fc19b5d656c1f911d0a0b4e3e0424f9532ececf319a4296e"}, - {file = "ruff-0.11.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:193e6fac6eb60cc97b9f728e953c21cc38a20077ed64f912e9d62b97487f3f2d"}, - 
{file = "ruff-0.11.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7af4e5f69b7c138be8dcffa5b4a061bf6ba6a3301f632a6bce25d45daff9bc99"}, - {file = "ruff-0.11.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:126b1bf13154aa18ae2d6c3c5efe144ec14b97c60844cfa6eb960c2a05188222"}, - {file = "ruff-0.11.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8806daaf9dfa881a0ed603f8a0e364e4f11b6ed461b56cae2b1c0cab0645304"}, - {file = "ruff-0.11.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5d94bb1cc2fc94a769b0eb975344f1b1f3d294da1da9ddbb5a77665feb3a3019"}, - {file = "ruff-0.11.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:995071203d0fe2183fc7a268766fd7603afb9996785f086b0d76edee8755c896"}, - {file = "ruff-0.11.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7a37ca937e307ea18156e775a6ac6e02f34b99e8c23fe63c1996185a4efe0751"}, - {file = "ruff-0.11.4-py3-none-win32.whl", hash = "sha256:0e9365a7dff9b93af933dab8aebce53b72d8f815e131796268709890b4a83270"}, - {file = "ruff-0.11.4-py3-none-win_amd64.whl", hash = "sha256:5a9fa1c69c7815e39fcfb3646bbfd7f528fa8e2d4bebdcf4c2bd0fa037a255fb"}, - {file = "ruff-0.11.4-py3-none-win_arm64.whl", hash = "sha256:d435db6b9b93d02934cf61ef332e66af82da6d8c69aefdea5994c89997c7a0fc"}, - {file = "ruff-0.11.4.tar.gz", hash = "sha256:f45bd2fb1a56a5a85fae3b95add03fb185a0b30cf47f5edc92aa0355ca1d7407"}, + {file = "ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b"}, + {file = "ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077"}, + {file = "ruff-0.11.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4bfd80a6ec559a5eeb96c33f832418bf0fb96752de0539905cf7b0cc1d31d779"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0947c0a1afa75dcb5db4b34b070ec2bccee869d40e6cc8ab25aca11a7d527794"}, + {file = 
"ruff-0.11.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad871ff74b5ec9caa66cb725b85d4ef89b53f8170f47c3406e32ef040400b038"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6cf918390cfe46d240732d4d72fa6e18e528ca1f60e318a10835cf2fa3dc19f"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56145ee1478582f61c08f21076dc59153310d606ad663acc00ea3ab5b2125f82"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5f66f8f1e8c9fc594cbd66fbc5f246a8d91f916cb9667e80208663ec3728304"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80b4df4d335a80315ab9afc81ed1cff62be112bd165e162b5eed8ac55bfc8470"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3068befab73620b8a0cc2431bd46b3cd619bc17d6f7695a3e1bb166b652c382a"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f5da2e710a9641828e09aa98b92c9ebbc60518fdf3921241326ca3e8f8e55b8b"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ef39f19cb8ec98cbc762344921e216f3857a06c47412030374fffd413fb8fd3a"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2a7cedf47244f431fd11aa5a7e2806dda2e0c365873bda7834e8f7d785ae159"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:81be52e7519f3d1a0beadcf8e974715b2dfc808ae8ec729ecfc79bddf8dbb783"}, + {file = "ruff-0.11.5-py3-none-win32.whl", hash = "sha256:e268da7b40f56e3eca571508a7e567e794f9bfcc0f412c4b607931d3af9c4afe"}, + {file = "ruff-0.11.5-py3-none-win_amd64.whl", hash = "sha256:6c6dc38af3cfe2863213ea25b6dc616d679205732dc0fb673356c2d69608f800"}, + {file = "ruff-0.11.5-py3-none-win_arm64.whl", hash = "sha256:67e241b4314f4eacf14a601d586026a962f4002a475aa702c69980a38087aa4e"}, + {file = "ruff-0.11.5.tar.gz", hash = 
"sha256:cae2e2439cb88853e421901ec040a758960b576126dab520fa08e9de431d1bef"}, ] [[package]] @@ -6214,6 +6582,114 @@ files = [ {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] +[[package]] +name = "spacy" +version = "3.8.5" +description = "Industrial-strength Natural Language Processing (NLP) in Python" +optional = false +python-versions = "<3.13,>=3.9" +files = [ + {file = "spacy-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b333745f48c0c005d5ba2aaf7b955a06532e229785b758c09d3d07c1f40dea1"}, + {file = "spacy-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:734a7865936b514c0813ba9e34e7d11484bbef2b678578d850afa67e499b8854"}, + {file = "spacy-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27bab13056ce2943552fbd26668dcd8e33a9a182d981a4612ff3cd176e0f89c7"}, + {file = "spacy-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04f12e3608ec3fe4797e5b964bfb09ca569a343970bd20140ed6bae5beda8e80"}, + {file = "spacy-3.8.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a3ef2b91d462c0834b4eb350b914f202eded9e86cdbbae8f61b69d75f2bd0022"}, + {file = "spacy-3.8.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5b1e092407eee83ebe1df7dff446421fd97ccf89824c2eea2ab71a350d10e014"}, + {file = "spacy-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:376417b44b899d35f979b11cf7e00c14f5d728a3bf61e56272dbfcf9a0fd4be5"}, + {file = "spacy-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:489bc473e47db9e3a84a388bb3ed605f9909b6f38d3a8232c106c53bd8201c73"}, + {file = "spacy-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aef2cc29aed14645408d7306e973eeb6587029c0e7cf8a06b8edc9c6e465781f"}, + {file = "spacy-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e6014ce5823e0b056d5a3d19f32acefa45941a2521ebed29bb37a5566b04d41"}, + {file = 
"spacy-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba8f76cb1df0eac49f167bd29127b20670dcc258b6bf70639aea325adc25080"}, + {file = "spacy-3.8.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dd16d593438b322f21d4fc75d8e1ee8581a1383e185ef0bd9bcdf960f15e3dff"}, + {file = "spacy-3.8.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c418d5fd425634dbce63f479096a20e1eb030b750167dcf5350f76463c8a6ec4"}, + {file = "spacy-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:57bdb288edfb6477893333497e541d16116923105026a49811215d1c22210c5b"}, + {file = "spacy-3.8.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3a7c8b21df409ddfb2c93bb32fa1fcaca8dc9d49d2bb49e428a2d8a67107b38a"}, + {file = "spacy-3.8.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c709e15a72f95b386df78330516cbd7c71d59ec92fc4342805ed69aeebb06f03"}, + {file = "spacy-3.8.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e803450298bbf8ae59a4d802dc308325c5da6e3b49339335040e4da3406e05d"}, + {file = "spacy-3.8.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be20f328b1581a840afc3439c4ed7ce991f2cc3848c670f5bc78d2027286ae80"}, + {file = "spacy-3.8.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b06a7a866e528cd7f65041562bc869e6851b404a75fddec6614b64603f66cc8e"}, + {file = "spacy-3.8.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe0b9db300a2a385220e3cad3ffbfcfd8ef4cd28dc038eca706b0bd2797e305e"}, + {file = "spacy-3.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:4a54587deda8ecea5ceb3d9f81bd40228d8a3c7bda4bc5fd06f7cf3364da8bd9"}, + {file = "spacy-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f24d3e78c63a99d608b03bb90edb0eaa35c92bd0e734c5b8cc0781212fa85f5f"}, + {file = "spacy-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560ee35c9c029b03294e99bfbb7b936d1e8d34c3cf0e003bb70c348c8af47751"}, + {file = "spacy-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:fa6d1b87d66e842f632d8bda57aeb26d06555ff47de6d23df8e79f09a8b8cafb"}, + {file = "spacy-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b94495dab9a73d7990c8ae602b01538e38eeb4ccc23e939ad238a2bb90bd22d1"}, + {file = "spacy-3.8.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8af92fb74ad8318c19a1d71900e574ece691d50f50f9531414a61b89832e3c87"}, + {file = "spacy-3.8.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f4ec788006b4174a4c04ceaef28c3080c1536bb90789aa6d77481c0284e50842"}, + {file = "spacy-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:13792e7b8ed81821867e218ec97e0b8f075ee5751d1a04288dd81ec35e430d16"}, + {file = "spacy-3.8.5.tar.gz", hash = "sha256:38bc8b877fb24f414905ff179620031607cd31fe6f900d67a06730142715651c"}, +] + +[package.dependencies] +catalogue = ">=2.0.6,<2.1.0" +cymem = ">=2.0.2,<2.1.0" +jinja2 = "*" +langcodes = ">=3.2.0,<4.0.0" +murmurhash = ">=0.28.0,<1.1.0" +numpy = {version = ">=1.19.0", markers = "python_version >= \"3.9\""} +packaging = ">=20.0" +preshed = ">=3.0.2,<3.1.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<3.0.0" +requests = ">=2.13.0,<3.0.0" +setuptools = "*" +spacy-legacy = ">=3.0.11,<3.1.0" +spacy-loggers = ">=1.0.0,<2.0.0" +srsly = ">=2.4.3,<3.0.0" +thinc = ">=8.3.4,<8.4.0" +tqdm = ">=4.38.0,<5.0.0" +typer = ">=0.3.0,<1.0.0" +wasabi = ">=0.9.1,<1.2.0" +weasel = ">=0.1.0,<0.5.0" + +[package.extras] +apple = ["thinc-apple-ops (>=1.0.0,<2.0.0)"] +cuda = ["cupy (>=5.0.0b4,<13.0.0)"] +cuda-autodetect = ["cupy-wheel (>=11.0.0,<13.0.0)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4,<13.0.0)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4,<13.0.0)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4,<13.0.0)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4,<13.0.0)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4,<13.0.0)"] +cuda112 = ["cupy-cuda112 (>=5.0.0b4,<13.0.0)"] +cuda113 = ["cupy-cuda113 (>=5.0.0b4,<13.0.0)"] +cuda114 = ["cupy-cuda114 (>=5.0.0b4,<13.0.0)"] +cuda115 = ["cupy-cuda115 (>=5.0.0b4,<13.0.0)"] +cuda116 = 
["cupy-cuda116 (>=5.0.0b4,<13.0.0)"] +cuda117 = ["cupy-cuda117 (>=5.0.0b4,<13.0.0)"] +cuda11x = ["cupy-cuda11x (>=11.0.0,<13.0.0)"] +cuda12x = ["cupy-cuda12x (>=11.5.0,<13.0.0)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4,<13.0.0)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4,<13.0.0)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4,<13.0.0)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4,<13.0.0)"] +ja = ["sudachidict_core (>=20211220)", "sudachipy (>=0.5.2,!=0.6.1)"] +ko = ["natto-py (>=0.9.0)"] +lookups = ["spacy_lookups_data (>=1.0.3,<1.1.0)"] +th = ["pythainlp (>=2.0)"] +transformers = ["spacy_transformers (>=1.1.2,<1.4.0)"] + +[[package]] +name = "spacy-legacy" +version = "3.0.12" +description = "Legacy registered functions for spaCy backwards compatibility" +optional = false +python-versions = ">=3.6" +files = [ + {file = "spacy-legacy-3.0.12.tar.gz", hash = "sha256:b37d6e0c9b6e1d7ca1cf5bc7152ab64a4c4671f59c85adaf7a3fcb870357a774"}, + {file = "spacy_legacy-3.0.12-py2.py3-none-any.whl", hash = "sha256:476e3bd0d05f8c339ed60f40986c07387c0a71479245d6d0f4298dbd52cda55f"}, +] + +[[package]] +name = "spacy-loggers" +version = "1.0.5" +description = "Logging utilities for SpaCy" +optional = false +python-versions = ">=3.6" +files = [ + {file = "spacy-loggers-1.0.5.tar.gz", hash = "sha256:d60b0bdbf915a60e516cc2e653baeff946f0cfc461b452d11a4d5458c6fe5f24"}, + {file = "spacy_loggers-1.0.5-py3-none-any.whl", hash = "sha256:196284c9c446cc0cdb944005384270d775fdeaf4f494d8e269466cfa497ef645"}, +] + [[package]] name = "speechrecognition" version = "3.14.2" @@ -6239,6 +6715,54 @@ openai = ["httpx (<0.28)", "openai"] pocketsphinx = ["pocketsphinx"] whisper-local = ["openai-whisper", "soundfile"] +[[package]] +name = "srsly" +version = "2.5.1" +description = "Modern high-performance serialization utilities for Python" +optional = false +python-versions = "<3.14,>=3.9" +files = [ + {file = "srsly-2.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d0cda6f65cc0dd1daf47e856b0d6c5d51db8a9343c5007723ca06903dcfe367d"}, + {file = "srsly-2.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf643e6f45c266cfacea54997a1f9cfe0113fadac1ac21a1ec5b200cfe477ba0"}, + {file = "srsly-2.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:467ed25ddab09ca9404fda92519a317c803b5ea0849f846e74ba8b7843557df5"}, + {file = "srsly-2.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f8113d202664b7d31025bdbe40b9d3536e8d7154d09520b6a1955818fa6d622"}, + {file = "srsly-2.5.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:794d39fccd2b333d24f1b445acc78daf90f3f37d3c0f6f0167f25c56961804e7"}, + {file = "srsly-2.5.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:df7fd77457c4d6c630f700b1019a8ad173e411e7cf7cfdea70e5ed86b608083b"}, + {file = "srsly-2.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:1a4dddb2edb8f7974c9aa5ec46dc687a75215b3bbdc815ce3fc9ea68fe1e94b5"}, + {file = "srsly-2.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58f0736794ce00a71d62a39cbba1d62ea8d5be4751df956e802d147da20ecad7"}, + {file = "srsly-2.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8269c40859806d71920396d185f4f38dc985cdb6a28d3a326a701e29a5f629"}, + {file = "srsly-2.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889905900401fefc1032e22b73aecbed8b4251aa363f632b2d1f86fc16f1ad8e"}, + {file = "srsly-2.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf454755f22589df49c25dc799d8af7b47dce3d861dded35baf0f0b6ceab4422"}, + {file = "srsly-2.5.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc0607c8a59013a51dde5c1b4e465558728e9e0a35dcfa73c7cbefa91a0aad50"}, + {file = "srsly-2.5.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d5421ba3ab3c790e8b41939c51a1d0f44326bfc052d7a0508860fb79a47aee7f"}, + {file = "srsly-2.5.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:b96ea5a9a0d0379a79c46d255464a372fb14c30f59a8bc113e4316d131a530ab"}, + {file = "srsly-2.5.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:683b54ed63d7dfee03bc2abc4b4a5f2152f81ec217bbadbac01ef1aaf2a75790"}, + {file = "srsly-2.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:459d987130e57e83ce9e160899afbeb871d975f811e6958158763dd9a8a20f23"}, + {file = "srsly-2.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:184e3c98389aab68ff04aab9095bd5f1a8e5a72cc5edcba9d733bac928f5cf9f"}, + {file = "srsly-2.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00c2a3e4856e63b7efd47591d049aaee8e5a250e098917f50d93ea68853fab78"}, + {file = "srsly-2.5.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:366b4708933cd8d6025c13c2cea3331f079c7bb5c25ec76fca392b6fc09818a0"}, + {file = "srsly-2.5.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8a0b03c64eb6e150d772c5149befbadd981cc734ab13184b0561c17c8cef9b1"}, + {file = "srsly-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:7952538f6bba91b9d8bf31a642ac9e8b9ccc0ccbb309feb88518bfb84bb0dc0d"}, + {file = "srsly-2.5.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84b372f7ef1604b4a5b3cee1571993931f845a5b58652ac01bcb32c52586d2a8"}, + {file = "srsly-2.5.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6ac3944c112acb3347a39bfdc2ebfc9e2d4bace20fe1c0b764374ac5b83519f2"}, + {file = "srsly-2.5.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6118f9c4b221cde0a990d06a42c8a4845218d55b425d8550746fe790acf267e9"}, + {file = "srsly-2.5.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7481460110d9986781d9e4ac0f5f991f1d6839284a80ad268625f9a23f686950"}, + {file = "srsly-2.5.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e57b8138082f09e35db60f99757e16652489e9e3692471d8e0c39aa95180688"}, + {file = "srsly-2.5.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:bab90b85a63a1fe0bbc74d373c8bb9bb0499ddfa89075e0ebe8d670f12d04691"}, + {file = "srsly-2.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:e73712be1634b5e1de6f81c273a7d47fe091ad3c79dc779c03d3416a5c117cee"}, + {file = "srsly-2.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d3b846ece78ec02aee637c1028cbbc6f0756faf8b01af190e9bbc8705321fc0"}, + {file = "srsly-2.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1529f5beb25a736ba1177f55532a942c786a8b4fe544bf9e9fbbebc5c63f4224"}, + {file = "srsly-2.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3c689a9f8dfa25c56533a3f145693b20ddc56415e25035e526ff7a7251a8c11"}, + {file = "srsly-2.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5982d01c7ddd62dbdb778a8bd176513d4d093cc56ef925fa2b0e13f71ed1809a"}, + {file = "srsly-2.5.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:196d3a2cc74758b2284e45f192e0df55d032b70be8481e207affc03216ddb464"}, + {file = "srsly-2.5.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:de756942e08ac3d8e8f5ae4595855932d7e4357f63adac6925b516c168f24711"}, + {file = "srsly-2.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:08b4045506cd4b63d2bb0da523156ab3ee67719aac3ca8cb591d6ed7ee55080e"}, + {file = "srsly-2.5.1.tar.gz", hash = "sha256:ab1b4bf6cf3e29da23dae0493dd1517fb787075206512351421b89b4fc27c77e"}, +] + +[package.dependencies] +catalogue = ">=2.0.3,<2.1.0" + [[package]] name = "stack-data" version = "0.6.3" @@ -6379,6 +6903,95 @@ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] +[[package]] +name = "textblob" +version = "0.18.0.post0" +description = "Simple, Pythonic text processing. Sentiment analysis, part-of-speech tagging, noun phrase parsing, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "textblob-0.18.0.post0-py3-none-any.whl", hash = "sha256:dd0c7ec4eb7b9346ec0a3f136a63eba13e0f59890d2a693d3d6aeb8371949dca"}, + {file = "textblob-0.18.0.post0.tar.gz", hash = "sha256:8131c52c630bcdf61d04c359f939c98d5b836a01fba224d9e7ae22fc274e0ccb"}, +] + +[package.dependencies] +nltk = ">=3.8" + +[package.extras] +dev = ["pre-commit (>=3.5,<4.0)", "textblob[tests]", "tox"] +docs = ["PyYAML (==6.0.1)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)"] +tests = ["numpy", "pytest"] + +[[package]] +name = "thinc" +version = "8.3.4" +description = "A refreshing functional take on deep learning, compatible with your favorite libraries" +optional = false +python-versions = "<3.13,>=3.9" +files = [ + {file = "thinc-8.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:916ea79a7c7462664be9435679b7769b4fc1ecea3886db6da6118e4eb5cc8c8b"}, + {file = "thinc-8.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c985ce9cf82a611f4f348c721372d073537ca0e8b7bbb8bd865c1598ddd79d1"}, + {file = "thinc-8.3.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fff4b30f8513832d13a31486e9074a7020de3d48f8a3d1527e369c242d6ebe9"}, + {file = "thinc-8.3.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a9ee46d19b9f4cac13a5539f97978c857338a31e4bf8d9b3a7741dcbc792220f"}, + {file = "thinc-8.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:d08529d53f8652e15e4f3c0f6953e73f85cc71d3b6e4750d2d9ace23616dbe8f"}, + {file = "thinc-8.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8bb4b47358a1855803b375f4432cefdf373f46ef249b554418d2e77c7323040"}, + {file = "thinc-8.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:00ed92f9a34b9794f51fcd48467c863f4eb7c5b41559aef6ef3c980c21378fec"}, + {file = "thinc-8.3.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85691fca84a6a1506f7ddbd2c1706a5524d56f65582e76b2e260a06d9e83e86d"}, + {file = 
"thinc-8.3.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eae1573fc19e514defc1bfd4f93f0b4bfc1dcefdb6d70bad1863825747f24800"}, + {file = "thinc-8.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:81e8638f9bdc38e366674acc4b63cf7c6267266a15477963a5db21b3d9f1aa36"}, + {file = "thinc-8.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c9da6375b106df5186bd2bfd1273bc923c01ab7d482f8942e4ee528a28965c3a"}, + {file = "thinc-8.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:07091c6b5faace50857c4cf0982204969d77388d0a6f156dd2442297dceeb838"}, + {file = "thinc-8.3.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd40ad71bcd8b1b9daa0462e1255b1c1e86e901c2fd773966601f44a95878032"}, + {file = "thinc-8.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb10823b3a3f1c6440998b11bf9a3571dd859feaed0fdb510a1c1097d9dc6a86"}, + {file = "thinc-8.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5e5e7bf5dae142fd50ed9785971292c4aab4d9ed18e4947653b6a0584d5227c"}, + {file = "thinc-8.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:960366f41f0d5c4cecdf8610d03bdf80b14a959a7fe94008b788a5336d388781"}, + {file = "thinc-8.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d85babfae9b31e2e20f4884787b1391ca126f84e9b9f7f498990c07f7019f848"}, + {file = "thinc-8.3.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8791c87857c474499455bfdd3f58432e2dc1e2cdadf46eb2f3c2293851a8a837"}, + {file = "thinc-8.3.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c95456cbc1344ab9041c2e16c9fa065ac2b56520929a5a594b3c80ddda136b1e"}, + {file = "thinc-8.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:11e6e14c1bfdb7c456f3da19dcf94def8304a7b279329f328e55062a292bc79f"}, + {file = "thinc-8.3.4.tar.gz", hash = "sha256:b5925482498bbb6dca0771e375b35c915818f735891e93d93a662dab15f6ffd8"}, +] + +[package.dependencies] +blis = ">=1.2.0,<1.3.0" +catalogue = ">=2.0.4,<2.1.0" +confection = ">=0.0.1,<1.0.0" +cymem = ">=2.0.2,<2.1.0" +murmurhash = 
">=1.0.2,<1.1.0" +numpy = {version = ">=1.19.0,<3.0.0", markers = "python_version >= \"3.9\""} +packaging = ">=20.0" +preshed = ">=3.0.2,<3.1.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<3.0.0" +setuptools = "*" +srsly = ">=2.4.0,<3.0.0" +wasabi = ">=0.8.1,<1.2.0" + +[package.extras] +apple = ["thinc-apple-ops (>=1.0.0,<2.0.0)"] +cuda = ["cupy (>=5.0.0b4)"] +cuda-autodetect = ["cupy-wheel (>=11.0.0)"] +cuda100 = ["cupy-cuda100 (>=5.0.0b4)"] +cuda101 = ["cupy-cuda101 (>=5.0.0b4)"] +cuda102 = ["cupy-cuda102 (>=5.0.0b4)"] +cuda110 = ["cupy-cuda110 (>=5.0.0b4)"] +cuda111 = ["cupy-cuda111 (>=5.0.0b4)"] +cuda112 = ["cupy-cuda112 (>=5.0.0b4)"] +cuda113 = ["cupy-cuda113 (>=5.0.0b4)"] +cuda114 = ["cupy-cuda114 (>=5.0.0b4)"] +cuda115 = ["cupy-cuda115 (>=5.0.0b4)"] +cuda116 = ["cupy-cuda116 (>=5.0.0b4)"] +cuda117 = ["cupy-cuda117 (>=5.0.0b4)"] +cuda11x = ["cupy-cuda11x (>=11.0.0)"] +cuda12x = ["cupy-cuda12x (>=11.5.0)"] +cuda80 = ["cupy-cuda80 (>=5.0.0b4)"] +cuda90 = ["cupy-cuda90 (>=5.0.0b4)"] +cuda91 = ["cupy-cuda91 (>=5.0.0b4)"] +cuda92 = ["cupy-cuda92 (>=5.0.0b4)"] +datasets = ["ml_datasets (>=0.2.0,<0.3.0)"] +mxnet = ["mxnet (>=1.5.1,<1.6.0)"] +tensorflow = ["tensorflow (>=2.0.0,<2.6.0)"] +torch = ["torch (>=1.6.0)"] + [[package]] name = "threadpoolctl" version = "3.6.0" @@ -6582,13 +7195,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.13.1" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.13.1-py3-none-any.whl", hash = "sha256:4b6cf02909eb5495cfbc3f6e8fd49217e6cc7944e145cdda8caa3734777f9e69"}, - {file = "typing_extensions-4.13.1.tar.gz", hash = "sha256:98795af00fb9640edec5b8e31fc647597b4691f099ad75f469a2616be1a76dff"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = 
"sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [[package]] @@ -6656,13 +7269,13 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "2.3.0" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] @@ -6710,6 +7323,20 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "wasabi" +version = "1.1.3" +description = "A lightweight console printing and formatting toolkit" +optional = false +python-versions = ">=3.6" +files = [ + {file = "wasabi-1.1.3-py3-none-any.whl", hash = "sha256:f76e16e8f7e79f8c4c8be49b4024ac725713ab10cd7f19350ad18a8e3f71728c"}, + {file = "wasabi-1.1.3.tar.gz", hash = "sha256:4bb3008f003809db0c3e28b4daf20906ea871a2bb43f9914197d540f4f2e0878"}, +] + +[package.dependencies] +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\" and python_version >= \"3.7\""} 
+ [[package]] name = "wcwidth" version = "0.2.13" @@ -6721,6 +7348,28 @@ files = [ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] +[[package]] +name = "weasel" +version = "0.4.1" +description = "Weasel: A small and easy workflow system" +optional = false +python-versions = ">=3.7" +files = [ + {file = "weasel-0.4.1-py3-none-any.whl", hash = "sha256:24140a090ea1ac512a2b2f479cc64192fd1d527a7f3627671268d08ed5ac418c"}, + {file = "weasel-0.4.1.tar.gz", hash = "sha256:aabc210f072e13f6744e5c3a28037f93702433405cd35673f7c6279147085aa9"}, +] + +[package.dependencies] +cloudpathlib = ">=0.7.0,<1.0.0" +confection = ">=0.0.4,<0.2.0" +packaging = ">=20.0" +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<3.0.0" +requests = ">=2.13.0,<3.0.0" +smart-open = ">=5.2.1,<8.0.0" +srsly = ">=2.4.3,<3.0.0" +typer = ">=0.3.0,<1.0.0" +wasabi = ">=0.9.1,<1.2.0" + [[package]] name = "webcolors" version = "24.11.1" @@ -6761,13 +7410,13 @@ test = ["websockets"] [[package]] name = "widgetsnbextension" -version = "4.0.13" +version = "4.0.14" description = "Jupyter interactive widgets for Jupyter Notebook" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, - {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, + {file = "widgetsnbextension-4.0.14-py3-none-any.whl", hash = "sha256:4875a9eaf72fbf5079dc372a51a9f268fc38d46f767cbf85c43a36da5cb9b575"}, + {file = "widgetsnbextension-4.0.14.tar.gz", hash = "sha256:a3629b04e3edb893212df862038c7232f62973373869db5084aed739b437b5af"}, ] [[package]] @@ -7037,4 +7686,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "~3.10" -content-hash = "b3eab1568863563f4279757a54ea24ed126136606711c60b042468a79a3a2183" +content-hash = 
"18b80d0f03f525315906f90df53fd38d919a667fb7178ba632268c8ea72ff173" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 6877b7c6..f2af7267 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -50,11 +50,11 @@ environs = ">=9.5.0" fastapi = ">=0.110.0" fastparquet = ">=2023.10.1" fsspec = ">=2024.2.0" -graphrag = "==1.2.0" +graphrag = { git = "https://github.com/microsoft/graphrag.git", rev = "ffd8db7104defdcd131e5af38473d2f0815e3cf7" } httpx = ">=0.25.2" kubernetes = ">=29.0.0" markitdown = {extras = ["all"], version = "^0.1.1"} -networkx = ">=3.2.1" +networkx = ">=3.4.2" nltk = "*" pandas = ">=2.2.1" pyaml-env = ">=1.2.1" diff --git a/backend/scripts/indexer.py b/backend/scripts/indexer.py index ed8dcfb1..54814288 100644 --- a/backend/scripts/indexer.py +++ b/backend/scripts/indexer.py @@ -7,11 +7,14 @@ from pathlib import Path import graphrag.api as api -import yaml from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks -from graphrag.config.create_graphrag_config import create_graphrag_config -from graphrag.index.create_pipeline_config import create_pipeline_config -from graphrag.index.typing import PipelineRunResult + +# from graphrag.index.create_pipeline_config import create_pipeline_config +from graphrag.config.enums import IndexingMethod +from graphrag.config.load_config import load_config +from graphrag.config.models.graph_rag_config import GraphRagConfig +from graphrag.index.typing.pipeline_run_result import PipelineRunResult +from graphrag.index.workflows.factory import PipelineFactory from graphrag_app.logger import ( PipelineJobUpdater, @@ -48,55 +51,75 @@ def start_indexing_job(index_name: str): storage_name = pipeline_job.human_readable_index_name # load custom pipeline settings - SCRIPT_DIR = Path(__file__).resolve().parent - with (SCRIPT_DIR / "settings.yaml").open("r") as f: - data = yaml.safe_load(f) - # dynamically set some values - data["input"]["container_name"] = sanitized_storage_name - 
data["storage"]["container_name"] = sanitized_index_name - data["reporting"]["container_name"] = sanitized_index_name - data["cache"]["container_name"] = sanitized_index_name - if "vector_store" in data["embeddings"]: - data["embeddings"]["vector_store"]["collection_name"] = ( - f"{sanitized_index_name}_description_embedding" - ) - - # set prompt for entity extraction - if pipeline_job.entity_extraction_prompt: - fname = "entity-extraction-prompt.txt" - with open(fname, "w") as outfile: - outfile.write(pipeline_job.entity_extraction_prompt) - data["entity_extraction"]["prompt"] = fname + ROOT_DIR = Path(__file__).resolve().parent / "settings.yaml" + config: GraphRagConfig = load_config( + root_dir=ROOT_DIR.parent, + config_filepath=ROOT_DIR + ) + # dynamically assign the sanitized index name + config.vector_store["default_vector_store"].container_name = sanitized_index_name + + # dynamically set indexing storage values + config.input.container_name = sanitized_storage_name + config.output.container_name = sanitized_index_name + config.reporting.container_name = sanitized_index_name + config.cache.container_name = sanitized_index_name + + # update extraction prompts + PROMPT_DIR = Path(__file__).resolve().parent + + # set prompt for entity extraction / graph construction + if pipeline_job.entity_extraction_prompt is None: + # use the default prompt + config.extract_graph.prompt = None else: - data.pop("entity_extraction") + # try to load the custom prompt + fname = "extract_graph.txt" + with open(PROMPT_DIR / fname, "w") as file: + file.write(pipeline_job.entity_extraction_prompt) + config.extract_graph.prompt = fname # set prompt for entity summarization - if pipeline_job.entity_summarization_prompt: - fname = "entity-summarization-prompt.txt" - with open(fname, "w") as outfile: - outfile.write(pipeline_job.entity_summarization_prompt) - data["summarize_descriptions"]["prompt"] = fname + if pipeline_job.entity_summarization_prompt is None: + # use the default 
prompt + config.summarize_descriptions.prompt = None + else: + # try to load the custom prompt + fname = "summarize_descriptions.txt" + with open(PROMPT_DIR / fname, "w") as file: + file.write(pipeline_job.entity_summarization_prompt) + config.summarize_descriptions.prompt = fname + + # set prompt for community graph summarization + if pipeline_job.community_summarization_graph_prompt is None: + # use the default prompt + config.community_reports.graph_prompt = None else: - data.pop("summarize_descriptions") - - # set prompt for community summarization - if pipeline_job.community_summarization_prompt: - fname = "community-summarization-prompt.txt" - with open(fname, "w") as outfile: - outfile.write(pipeline_job.community_summarization_prompt) - data["community_reports"]["prompt"] = fname + # try to load the custom prompt + fname = "community_report_graph.txt" + with open(PROMPT_DIR / fname, "w") as file: + file.write(pipeline_job.community_summarization_graph_prompt) + pipeline_job.community_summarization_graph_prompt = fname + + # set prompt for community text summarization + if pipeline_job.community_summarization_text_prompt is None: + # use the default prompt + config.community_reports.text_prompt = None else: - data.pop("community_reports") + fname = "community_report_text.txt" + # try to load the custom prompt + with open(PROMPT_DIR / fname, "w") as file: + file.write(pipeline_job.community_summarization_text_prompt) + config.community_reports.text_prompt = fname - # generate default graphrag config parameters and override with custom settings - parameters = create_graphrag_config(data, ".") + # set the extraction strategy + indexing_method = IndexingMethod(pipeline_job.indexing_method) + pipeline_workflows = PipelineFactory.create_pipeline(config, indexing_method) # reset pipeline job details pipeline_job.status = PipelineJobState.RUNNING - pipeline_config = create_pipeline_config(parameters) - pipeline_job.all_workflows = [ - workflow.name for workflow in 
pipeline_config.workflows - ] + + pipeline_job.all_workflows = pipeline_workflows.names() pipeline_job.completed_workflows = [] pipeline_job.failed_workflows = [] @@ -117,7 +140,8 @@ def start_indexing_job(index_name: str): print("Building index...") pipeline_results: list[PipelineRunResult] = asyncio.run( api.build_index( - config=parameters, + config=config, + method=indexing_method, callbacks=[logger, pipeline_job_updater], ) ) diff --git a/backend/scripts/settings.yaml b/backend/scripts/settings.yaml index dfbe4ff3..23246b6c 100644 --- a/backend/scripts/settings.yaml +++ b/backend/scripts/settings.yaml @@ -3,135 +3,164 @@ # this yaml file serves as a configuration template for the graphrag indexing jobs # some values are hardcoded while others denoted by PLACEHOLDER will be dynamically set - -###################### LLM settings ###################### -encoding_model: cl100k_base # this needs to be matched to your model! - -llm: - type: azure_openai_chat - api_base: $GRAPHRAG_API_BASE - api_version: $GRAPHRAG_API_VERSION - model: $GRAPHRAG_LLM_MODEL - deployment_name: $GRAPHRAG_LLM_DEPLOYMENT_NAME - audience: $COGNITIVE_SERVICES_AUDIENCE - model_supports_json: True - tokens_per_minute: 80_000 - requests_per_minute: 480 - concurrent_requests: 25 - max_retries: 250 - max_retry_wait: 60.0 - sleep_on_rate_limit_recommendation: True - -parallelization: - num_threads: 10 - stagger: 0.25 - -async_mode: threaded # or asyncio - -embeddings: - vector_store: - type: azure_ai_search - collection_name: PLACEHOLDER - title_column: name - overwrite: True - url: $AI_SEARCH_URL - audience: $AI_SEARCH_AUDIENCE - llm: - type: azure_openai_embedding - api_base: $GRAPHRAG_API_BASE - api_version: $GRAPHRAG_API_VERSION - batch_size: 10 - model: $GRAPHRAG_EMBEDDING_MODEL - deployment_name: $GRAPHRAG_EMBEDDING_DEPLOYMENT_NAME - audience: $COGNITIVE_SERVICES_AUDIENCE - tokens_per_minute: 350_000 - requests_per_minute: 2_100 +# For a full list of available settings, see 
https://microsoft.github.io/graphrag/config/yaml/ ###################### Input settings ###################### +encoding_model: &encoding_name o200k_base # gpt-4o + input: - type: blob - file_type: text + type: blob # or file + file_type: text # [csv, text, json] base_dir: . - file_encoding: utf-8 - file_pattern: .*\.txt$ - storage_account_blob_url: $STORAGE_ACCOUNT_BLOB_URL container_name: PLACEHOLDER + storage_account_blob_url: &storage_account_blob_url ${STORAGE_ACCOUNT_BLOB_URL} chunks: - size: 1_200 - overlap: 100 - group_by_columns: [id] + size: 800 # 800 tokens (about 3200 characters) + overlap: 100 # 100 tokens (about 400 characters) + strategy: tokens # or sentences + encoding_model: *encoding_name ###################### Storage settings ###################### cache: - type: blob - storage_account_blob_url: $STORAGE_ACCOUNT_BLOB_URL - container_name: PLACEHOLDER + type: blob # or [file, memory, none, blob, cosmosdb] base_dir: cache + container_name: PLACEHOLDER + storage_account_blob_url: *storage_account_blob_url reporting: - type: blob - storage_account_blob_url: $STORAGE_ACCOUNT_BLOB_URL - container_name: PLACEHOLDER + type: blob # or [file, console, blob] base_dir: logs - -storage: - type: blob - storage_account_blob_url: $STORAGE_ACCOUNT_BLOB_URL container_name: PLACEHOLDER + storage_account_blob_url: *storage_account_blob_url + +output: + type: blob # or [file, memory, blob, cosmosdb] base_dir: output + container_name: PLACEHOLDER + storage_account_blob_url: *storage_account_blob_url -###################### Workflow settings ###################### -skip_workflows: [] +###################### LLM settings ###################### +models: + default_chat_model: &default_chat_model + encoding_model: *encoding_name + type: azure_openai_chat # or openai_chat + auth_type: azure_managed_identity # or api_key + model: ${GRAPHRAG_LLM_MODEL} + deployment_name: ${GRAPHRAG_LLM_DEPLOYMENT_NAME} + audience: ${COGNITIVE_SERVICES_AUDIENCE} + model_supports_json: true # 
recommended if this is available for your model. + concurrent_requests: ${GRAPHRAG_LLM_MODEL_CONCURRENT_REQUEST} # max number of simultaneous LLM requests allowed + async_mode: threaded # or asyncio + retry_strategy: native + max_retries: -1 # set to -1 for dynamic retry logic (most optimal setting based on server response) + tokens_per_minute: 0 # set to 0 to disable rate limiting + requests_per_minute: 0 # set to 0 to disable rate limiting + api_base: ${GRAPHRAG_API_BASE} + api_version: ${GRAPHRAG_API_VERSION} + default_embedding_model: + <<: *default_chat_model # use the same settings as the chat model + type: azure_openai_embedding # or azure_openai_embedding + model: ${GRAPHRAG_EMBEDDING_MODEL} + deployment_name: ${GRAPHRAG_EMBEDDING_DEPLOYMENT_NAME} + concurrent_requests: ${GRAPHRAG_EMBEDDING_MODEL_CONCURRENT_REQUEST} # max number of simultaneous LLM requests allowed + +vector_store: + default_vector_store: + type: azure_ai_search # or [lancedb, azure_ai_search, cosmosdb] + container_name: PLACEHOLDER + overwrite: True + url: ${AI_SEARCH_URL} + audience: ${AI_SEARCH_AUDIENCE} + +embed_text: + model_id: default_embedding_model + vector_store_id: default_vector_store -entity_extraction: +###################### Workflow settings ###################### +extract_graph: + model_id: default_chat_model prompt: PLACEHOLDER - entity_types: [organization, person, geo, event] max_gleanings: 1 +extract_graph_nlp: + text_analyzer: + extractor_type: regex_english # [regex_english, syntactic_parser, cfg] + summarize_descriptions: + model_id: default_chat_model prompt: PLACEHOLDER max_length: 500 claim_extraction: enabled: false + model_id: default_chat_model prompt: "prompts/claim_extraction.txt" description: "Any claims or facts that could be relevant to information discovery." 
max_gleanings: 1 community_reports: - prompt: PLACEHOLDER + model_id: default_chat_model + graph_prompt: "prompts/community_report_graph.txt" + text_prompt: prompts/community_report_text.txt max_length: 2_000 max_input_length: 8_000 - + cluster_graph: max_cluster_size: 10 embed_graph: - enabled: false + enabled: false # if true, will generate node2vec embeddings for nodes umap: - enabled: false + enabled: false # if true, will generate UMAP embeddings for nodes snapshots: - graphml: True + graphml: true + transient: true embeddings: false - transient: false ###################### Query settings ###################### + +####################### +# search parameters # +####################### + ## The prompt locations are required here, but each search method has a number of optional knobs that can be tuned. ## See the config docs: https://microsoft.github.io/graphrag/config/yaml/#query + +### Query settings ### local_search: - prompt: PLACEHOLDER + chat_model_id: default_chat_model + embedding_model_id: default_embedding_model + prompt: "prompts/local_search_system_prompt.txt" + text_unit_prop: 0.5 + community_prop: 0.1 + conversation_history_max_turns: 5 + top_k_entities: 10 + top_k_relationships: 10 + temperature: 0.0 + top_p: 1.0 + n: 1 + max_tokens: 12000 + llm_max_tokens: 2000 global_search: - map_prompt: PLACEHOLDER - reduce_prompt: PLACEHOLDER - knowledge_prompt: PLACEHOLDER + chat_model_id: default_chat_model + embedding_model_id: default_embedding_model + map_prompt: "prompts/global_search_map_system_prompt.txt" + reduce_prompt: "prompts/global_search_reduce_system_prompt.txt" + knowledge_prompt: "prompts/global_search_knowledge_system_prompt.txt" + concurrency: 50 drift_search: - prompt: PLACEHOLDER - reduce_prompt: PLACEHOLDER + chat_model_id: default_chat_model + embedding_model_id: default_embedding_model + prompt: "prompts/drift_search_system_prompt.txt" + reduce_prompt: "prompts/drift_search_reduce_prompt.txt" + concurrency: 50 basic_search: - prompt: 
PLACEHOLDER + chat_model_id: default_chat_model + embedding_model_id: default_embedding_model + prompt: "prompts/basic_search_system_prompt.txt" diff --git a/docs/DEPLOYMENT-GUIDE.md b/docs/DEPLOYMENT-GUIDE.md index cf763914..3c8b940e 100644 --- a/docs/DEPLOYMENT-GUIDE.md +++ b/docs/DEPLOYMENT-GUIDE.md @@ -97,20 +97,26 @@ In the `deploy.parameters.json` file, provide values for the following required `GRAPHRAG_LLM_MODEL` | gpt-4 | No | Name of the Azure OpenAI LLM model to use (or deploy). `GRAPHRAG_LLM_MODEL_VERSION` | turbo-2024-04-09 | No | Model version of the LLM model to use (or deploy). Only required if deploying a new AOAI instance (i.e. `GRAPHRAG_API_BASE` is left undefined). `GRAPHRAG_LLM_DEPLOYMENT_NAME` | gpt-4 | No | Deployment name of the LLM model to use (or deploy). -`GRAPHRAG_LLM_MODEL_QUOTA` | 80 | No | TPM quota of the LLM model in units of 1000 (i.e. 10 = 10,000 TPM). Only required if deploying a new AOAI instance (i.e. `GRAPHRAG_API_BASE` is left undefined). +`GRAPHRAG_LLM_MODEL_CONCURRENT_REQUEST` | 15 | No | The max number of simultaneous chat completions LLM requests allowed. +`GRAPHRAG_LLM_MODEL_QUOTA` | 80 | No | TPM quota of the LLM model in units of 1000 (i.e. 10 = 10,000 TPM). Only required if deploying a new AOAI instance (i.e. +`GRAPHRAG_API_BASE` is left undefined). `GRAPHRAG_EMBEDDING_MODEL` | text-embedding-ada-002 | No | Name of the Azure OpenAI embedding model. `GRAPHRAG_EMBEDDING_MODEL_VERSION` | 2 | No | Model version of the embedding model to use (or deploy). Only required if deploying a new AOAI instance (i.e. `GRAPHRAG_API_BASE` is left undefined). `GRAPHRAG_EMBEDDING_DEPLOYMENT_NAME` | text-embedding-ada-002 | No | Deployment name of the embedding model to use (or deploy). +`GRAPHRAG_EMBEDDING_MODEL_CONCURRENT_REQUEST` | 15 | No | The max number of simultaneous embedding requests allowed. `GRAPHRAG_EMBEDDING_MODEL_QUOTA` | 300 | No | TPM quota of the embedding model in units of 1000 (i.e. 10 = 10,000 TPM). 
Only required if deploying a new AOAI instance (i.e. `GRAPHRAG_API_BASE` is left undefined). `GRAPHRAG_IMAGE` | graphrag:backend | No | The name and tag of the graphrag docker image in the container registry. Will default to `graphrag:backend` and be hosted at `my_container_registry_name>.azurecr.io/graphrag:backend`. `CONTAINER_REGISTRY_LOGIN_SERVER` | .azurecr.io | No | Endpoint of an existing Azure Container Registry where the `GRAPHRAG_IMAGE` docker image is hosted. If not provided, a unique name will be generated (recommended). `COGNITIVE_SERVICES_AUDIENCE` | `https://cognitiveservices.azure.com/.default` | No | Endpoint for cognitive services identity authorization. Should be explicitly set for deployments in other Azure clouds. `APIM_NAME` | | No | Hostname of the graphrag API. Must be a globally unique name. The API will be available at `https://.azure-api.net`. -`APIM_TIER` | Developer | No | The [APIM tier](https://azure.microsoft.com/en-us/pricing/details/api-management) to use. Can be either `Developer` or `StandardV2`. `StandardV2` costs more but will deploy faster. +`APIM_TIER` | Developer | No | The [APIM tier](https://azure.microsoft.com/en-us/pricing/details/api-management) to use. Can be either `Developer` or +`StandardV2`. `StandardV2` costs more but will deploy faster. +`AI_SEARCH_TIER` | standard | No | The [AI Search tier](https://learn.microsoft.com/en-us/azure/search/search-sku-tier) to use. Can be either `free`, `basic`, `standard`, `standard2`, `standard3`, `storage_optimized_l1`, or `storage_optimized_l2` `RESOURCE_BASE_NAME` | | No | Suffix to apply to all azure resource names. If not provided a unique suffix will be generated. `AISEARCH_ENDPOINT_SUFFIX` | `search.windows.net` | No | Suffix to apply to AI search endpoint. Should be overridden for deployments in other Azure clouds. `AISEARCH_AUDIENCE` | `https://search.azure.com/` | No | AAD audience for AI Search. Should be overridden for deployments in other Azure clouds. + ### 5. 
Deploy solution accelerator to the resource group ```shell cd infra diff --git a/infra/core/ai-search/ai-search.bicep b/infra/core/ai-search/ai-search.bicep index 1ac19a07..93d831d6 100644 --- a/infra/core/ai-search/ai-search.bicep +++ b/infra/core/ai-search/ai-search.bicep @@ -10,11 +10,23 @@ param location string = resourceGroup().location @allowed(['enabled', 'disabled']) param publicNetworkAccess string = 'enabled' +@allowed([ + 'free' + 'basic' + 'standard' + 'standard2' + 'standard3' + 'storage_optimized_l1' + 'storage_optimized_l2' +]) +@description('The pricing tier of the search service you want to create (for example, basic or standard).') +param sku string = 'standard' + resource search 'Microsoft.Search/searchServices@2024-06-01-preview' = { name: name location: location sku: { - name: 'standard' + name: sku } properties: { disableLocalAuth: true diff --git a/infra/core/cosmosdb/cosmosdb.bicep b/infra/core/cosmosdb/cosmosdb.bicep index 55ae6108..85594bc4 100644 --- a/infra/core/cosmosdb/cosmosdb.bicep +++ b/infra/core/cosmosdb/cosmosdb.bicep @@ -10,7 +10,7 @@ param location string = resourceGroup().location @allowed(['Enabled', 'Disabled']) param publicNetworkAccess string = 'Disabled' -var maxThroughput = 1000 +var maxThroughput = 3000 resource cosmosDb 'Microsoft.DocumentDB/databaseAccounts@2024-11-15' = { name: cosmosDbName @@ -54,7 +54,11 @@ resource cosmosDb 'Microsoft.DocumentDB/databaseAccounts@2024-11-15' = { } ] cors: [] - capabilities: [] + capabilities: [ + // { + // name: 'EnableNoSQLVectorSearch' + // } + ] ipRules: [] backupPolicy: { type: 'Periodic' diff --git a/infra/core/rbac/workload-identity-rbac.bicep b/infra/core/rbac/workload-identity-rbac.bicep index 5e0876aa..f1b62e9b 100644 --- a/infra/core/rbac/workload-identity-rbac.bicep +++ b/infra/core/rbac/workload-identity-rbac.bicep @@ -23,6 +23,28 @@ param storageName string @description('ID of an existing AOAI resource.') param aoaiId string +// custom role definition for cosmosDB 
vector store- allows write access to the database and container level +var customRoleName = 'Graphrag Cosmos DB Data Writer - Allow principal to create SQL databases and containers' +resource customCosmosRoleDefinition 'Microsoft.DocumentDB/databaseAccounts/sqlRoleDefinitions@2024-12-01-preview' = { + // note: the guid must be globally unique and deterministic (reproducible) across Azure + name: guid(cosmosDb.id, customRoleName) + parent: cosmosDb + properties: { + roleName: customRoleName + type: 'CustomRole' + assignableScopes: [ + cosmosDb.id // defining the custom Cosmos DB role with the actual Cosmos account scope + ] + permissions: [ + { + dataActions: [ + 'Microsoft.DocumentDB/databaseAccounts/sqlDatabases/write' + ] + } + ] + } +} + @description('Role definitions for various roles that will be assigned. Learn more: https://learn.microsoft.com/en-us/azure/role-based-access-control/built-in-roles') var roleIds = { contributor: 'b24988ac-6180-42a0-ab88-20f7382dd24c' // Contributor Role @@ -36,6 +58,7 @@ var roleIds = { monitoringMetricsPublisher: '3913510d-42f4-4e42-8a64-420c390055eb' // Monitoring Metrics Publisher Role storageBlobDataContributor: 'ba92f5b4-2d11-453d-a403-e96b0029c9fe' // Storage Blob Data Contributor Role sqlDBContributor: '9b7fa17d-e63e-47b0-bb0a-15c516ac86ec' // SQL DB Contributor Role - cosmos control plane operations + customCosmosRoleDefinition: customCosmosRoleDefinition.id // Custom Cosmos DB role definition } // get references to existing resources @@ -177,6 +200,17 @@ resource sqlRoleAssignment 'Microsoft.DocumentDB/databaseAccounts/sqlRoleAssignm } } +// // assign the newly created (Graphrag Cosmos DB Data Contributor) custom role to the principal +// resource customRoleAssignment 'Microsoft.DocumentDB/databaseAccounts/sqlRoleAssignments@2024-12-01-preview' = { +// name: guid(cosmosDb.id, principalId, principalType, customCosmosRoleDefinition.id) +// parent: cosmosDb +// properties: { +// principalId: principalId +// 
roleDefinitionId: customCosmosRoleDefinition.id +// scope: cosmosDb.id +// } +// } + // var customRoleName = 'Custom cosmosDB role for graphrag - adds read/write permissions at the container level' // resource customCosmosRoleDefinition 'Microsoft.DocumentDB/databaseAccounts/sqlRoleDefinitions@2024-12-01-preview' = { // // note: the guid must be globally unique and deterministic (reproducible) across Azure diff --git a/infra/deploy.sh b/infra/deploy.sh index 324a0295..c6610c6f 100755 --- a/infra/deploy.sh +++ b/infra/deploy.sh @@ -2,13 +2,14 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -# set -ux # uncomment this line to debug +set -ux # uncomment this line to debug # TODO: use https://www.shellcheck.net to lint this script and make recommended updates aksNamespace="graphrag" # Optional parameters with default values AI_SEARCH_AUDIENCE="https://search.azure.com" +AI_SEARCH_TIER="standard" AISEARCH_ENDPOINT_SUFFIX="search.windows.net" APIM_NAME="" APIM_TIER="Developer" @@ -29,6 +30,8 @@ GRAPHRAG_EMBEDDING_MODEL="text-embedding-ada-002" GRAPHRAG_EMBEDDING_MODEL_VERSION="2" GRAPHRAG_EMBEDDING_DEPLOYMENT_NAME="text-embedding-ada-002" GRAPHRAG_EMBEDDING_MODEL_QUOTA="300" +GRAPHRAG_LLM_MODEL_CONCURRENT_REQUEST="15" +GRAPHRAG_EMBEDDING_MODEL_CONCURRENT_REQUEST="15" requiredParams=( LOCATION @@ -39,6 +42,7 @@ optionalParams=( AISEARCH_ENDPOINT_SUFFIX APIM_NAME APIM_TIER + AI_SEARCH_TIER CLOUD_NAME GRAPHRAG_IMAGE PUBLISHER_EMAIL @@ -56,6 +60,8 @@ optionalParams=( GRAPHRAG_EMBEDDING_MODEL_QUOTA GRAPHRAG_EMBEDDING_MODEL_VERSION GRAPHRAG_EMBEDDING_DEPLOYMENT_NAME + GRAPHRAG_LLM_MODEL_CONCURRENT_REQUEST + GRAPHRAG_EMBEDDING_MODEL_CONCURRENT_REQUEST ) errorBanner () { @@ -360,6 +366,7 @@ deployAzureResources () { --parameters "resourceBaseName=$RESOURCE_BASE_NAME" \ --parameters "apimName=$APIM_NAME" \ --parameters "apimTier=$APIM_TIER" \ + --parameters "aiSearchTier=$AI_SEARCH_TIER" \ --parameters "apiPublisherEmail=$PUBLISHER_EMAIL" \ 
--parameters "apiPublisherName=$PUBLISHER_NAME" \ --parameters "enablePrivateEndpoints=$ENABLE_PRIVATE_ENDPOINTS" \ @@ -552,7 +559,10 @@ installGraphRAGHelmChart () { --set "graphragConfig.GRAPHRAG_LLM_DEPLOYMENT_NAME=$graphragLlmModelDeployment" \ --set "graphragConfig.GRAPHRAG_EMBEDDING_MODEL=$graphragEmbeddingModel" \ --set "graphragConfig.GRAPHRAG_EMBEDDING_DEPLOYMENT_NAME=$graphragEmbeddingModelDeployment" \ - --set "graphragConfig.STORAGE_ACCOUNT_BLOB_URL=$storageAccountBlobUrl" + --set "graphragConfig.STORAGE_ACCOUNT_BLOB_URL=$storageAccountBlobUrl" \ + --set "graphragConfig.GRAPHRAG_LLM_MODEL_CONCURRENT_REQUEST=\"$GRAPHRAG_LLM_MODEL_CONCURRENT_REQUEST\"" \ + --set "graphragConfig.GRAPHRAG_EMBEDDING_MODEL_CONCURRENT_REQUEST=\"$GRAPHRAG_EMBEDDING_MODEL_CONCURRENT_REQUEST\"" + local helmResult helmResult=$? diff --git a/infra/main.bicep b/infra/main.bicep index dedd342f..d26b6479 100644 --- a/infra/main.bicep +++ b/infra/main.bicep @@ -95,7 +95,11 @@ param llmModelVersion string = '2024-08-06' param llmModelQuota int = 1 @description('Name of the AOAI embedding model to use. Must match official model id. For more information: https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models') -@allowed(['text-embedding-ada-002', 'text-embedding-3-large']) +@allowed([ + 'text-embedding-ada-002' + 'text-embedding-3-large' + 'text-embedding-3-small' +]) param embeddingModelName string = 'text-embedding-ada-002' @description('Deployment name of the AOAI embedding model to use. Must match official model id. 
For more information: https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models') @@ -127,6 +131,15 @@ var appUrl = 'http://${appHostname}' // end AKS parameters // +// +// start AI Search parameters +// +@description('The pricing tier of the AI Search service to deploy (for example, basic or standard). Must be one of the tiers allowed by core/ai-search/ai-search.bicep.') +param aiSearchTier string = 'standard' +// +// end AI Search parameters +// + var abbrs = loadJsonContent('abbreviations.json') var tags = { 'azd-env-name': resourceGroupName } param utcString string = utcNow() @@ -275,6 +288,7 @@ module aiSearch 'core/ai-search/ai-search.bicep' = { name: '${abbrs.searchSearchServices}${resourceBaseNameFinal}' location: location publicNetworkAccess: enablePrivateEndpoints ? 'disabled' : 'enabled' + sku: aiSearchTier } }