diff --git a/.coveragerc b/.coveragerc index 8d18511c..c3e5c376 100644 --- a/.coveragerc +++ b/.coveragerc @@ -3,7 +3,7 @@ [run] branch = true parallel = true -source = test +source = src [report] sort = cover diff --git a/.env-template b/.env-template new file mode 100644 index 00000000..2c64beef --- /dev/null +++ b/.env-template @@ -0,0 +1,25 @@ +####################### +# openAI setup +####################### +#OPENAI_API_KEY=... +#OPENAI_MODEL=gpt-4o + +####################### +# Azure OpenAI setup +####################### +# gpt-4o +#AZURE_OPENAI_API_KEY=... +#AZURE_OPENAI_ENDPOINT="..." +# text-embedding +#AZURE_OPENAI_API_KEY_EMBEDDING=... +#AZURE_OPENAI_ENDPOINT_EMBEDDING="..." + +####################### +# Outlook mail dump (tools/mail/outlook_dump.py) +####################### +# User email address for login (pre-fills the sign-in page) +OUTLOOK_CLIENT_ID=user@email.com +# Azure AD app registration client ID (GUID) +OUTLOOK_APPLICATION_CLIENT_ID=... +# Azure AD tenant ID (GUID) +OUTLOOK_TENANT_ID=... 
\ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5f253eb3..2d2dfd07 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,7 +4,7 @@ name: CI on: push: branches: [ "main" ] - pull_request_target: + pull_request: branches: [ "main" ] workflow_dispatch: # manual run @@ -15,36 +15,35 @@ concurrency: permissions: pull-requests: read contents: read - id-token: write actions: read jobs: - permissions-check: - runs-on: ubuntu-latest - steps: - - # The following two steps (permissions checks) ensure that only users with write access can run this workflow on a PR (except the merge queue bot) - # PRs from forks we check the permissions of the user that triggered the workflow (github.triggering_actor) - # This means that if a user without write access opens a PR from a fork, they cannot run this workflow - # Users with write access can still run this workflow on a PR from a fork - # For PRs from the same repo, we allow the workflow to run as normal - - name: Get User Permission - if: ${{ github.event_name == 'pull_request_target' || github.triggering_actor != 'github-merge-queue[bot]' }} - id: checkAccess - uses: actions-cool/check-user-permission@v2 - with: - require: write - username: ${{ github.triggering_actor }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Check User Permission - if: ${{ (github.event_name == 'pull_request_target' || github.triggering_actor != 'github-merge-queue[bot]') && steps.checkAccess.outputs.require-result == 'false' }} - run: | - echo "${{ github.triggering_actor }} does not have permissions on this repo." 
- echo "Current permission level is ${{ steps.checkAccess.outputs.user-permission }}" - echo "Job originally triggered by ${{ github.actor }}" - exit 1 + # permissions-check: + # runs-on: ubuntu-latest + # steps: + + # # The following two steps (permissions checks) ensure that only users with write access can run this workflow on a PR (except the merge queue bot) + # # PRs from forks we check the permissions of the user that triggered the workflow (github.triggering_actor) + # # This means that if a user without write access opens a PR from a fork, they cannot run this workflow + # # Users with write access can still run this workflow on a PR from a fork + # # For PRs from the same repo, we allow the workflow to run as normal + # - name: Get User Permission + # if: ${{ github.event_name == 'pull_request_target' || github.triggering_actor != 'github-merge-queue[bot]' }} + # id: checkAccess + # uses: actions-cool/check-user-permission@v2 + # with: + # require: write + # username: ${{ github.triggering_actor }} + # env: + # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # - name: Check User Permission + # if: ${{ (github.event_name == 'pull_request_target' || github.triggering_actor != 'github-merge-queue[bot]') && steps.checkAccess.outputs.require-result == 'false' }} + # run: | + # echo "${{ github.triggering_actor }} does not have permissions on this repo." 
+ # echo "Current permission level is ${{ steps.checkAccess.outputs.user-permission }}" + # echo "Job originally triggered by ${{ github.actor }}" + # exit 1 check: strategy: @@ -127,7 +126,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest] - python-version: ['3.12', '3.13'] + python-version: ['3.12', '3.13', '3.14'] exclude: - os: windows-latest python-version: '3.13' @@ -158,58 +157,58 @@ jobs: run: | ./make.bat test - online-test: - needs: permissions-check - environment: - name: build-pipeline - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest] - python-version: ['3.14'] - runs-on: ${{ matrix.os }} - name: ${{ matrix.os }} (py ${{ matrix.python-version }}) - - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha || github.ref }} - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Install uv - uses: astral-sh/setup-uv@v7 - - - name: Install Dependencies (Linux) - if: runner.os == 'Linux' - shell: bash - run: | - make sync - - - name: Install Dependencies (Windows) - if: runner.os == 'Windows' - run: | - make sync - - - name: Login to Azure - uses: azure/login@v2.2.0 - with: - client-id: ${{ secrets.AZURE_CLIENTID }} - tenant-id: ${{ secrets.AZURE_TENANTID }} - subscription-id: ${{ secrets.AZURE_SUBSCRIPTIONID }} - - - name: Get Keys - run: | - uv run python tools/get_keys.py --vault build-pipeline-kv - - - name: Run Test - shell: bash - run: | - uv run pytest - - - name: Clean up Keys - run: | - node -e "try{require('fs').unlinkSync('./.env');}catch(e){}" + # online-test: + # needs: permissions-check + # environment: + # name: build-pipeline + # strategy: + # fail-fast: false + # matrix: + # os: [ubuntu-latest] + # python-version: ['3.14'] + # runs-on: ${{ matrix.os }} + # name: ${{ matrix.os }} (py ${{ matrix.python-version }}) + + # steps: + # - uses: actions/checkout@v4 + # with: + # ref: ${{ 
github.event.pull_request.head.sha || github.ref }} + + # - name: Set up Python + # uses: actions/setup-python@v5 + # with: + # python-version: ${{ matrix.python-version }} + + # - name: Install uv + # uses: astral-sh/setup-uv@v7 + + # - name: Install Dependencies (Linux) + # if: runner.os == 'Linux' + # shell: bash + # run: | + # make sync + + # - name: Install Dependencies (Windows) + # if: runner.os == 'Windows' + # run: | + # make sync + + # - name: Login to Azure + # uses: azure/login@v2.2.0 + # with: + # client-id: ${{ secrets.AZURE_CLIENTID }} + # tenant-id: ${{ secrets.AZURE_TENANTID }} + # subscription-id: ${{ secrets.AZURE_SUBSCRIPTIONID }} + + # - name: Get Keys + # run: | + # uv run python tools/get_keys.py --vault build-pipeline-kv + + # - name: Run Test + # shell: bash + # run: | + # uv run pytest + + # - name: Clean up Keys + # run: | + # node -e "try{require('fs').unlinkSync('./.env');}catch(e){}" diff --git a/.github/workflows/release-py.yml b/.github/workflows/release-py.yml index bdb89978..e6cd62a7 100644 --- a/.github/workflows/release-py.yml +++ b/.github/workflows/release-py.yml @@ -32,7 +32,7 @@ jobs: echo "$HOME/.local/bin" >> "$GITHUB_PATH" - name: Create .venv and install deps - run: uv sync --extra dev + run: uv sync - name: Build sdist + wheel (via make) run: make build # runs `uv build`, outputs to dist/ diff --git a/.gitignore b/.gitignore index cc1b497a..e9431b17 100644 --- a/.gitignore +++ b/.gitignore @@ -31,13 +31,16 @@ pytest.local.ini # Evaluations /evals -/tests/testdata/Episode_53_Answer_results.json -/tests/testdata/Episode_53_Search_results.json + +# E-Mail test data +!/tests/testdata/email-testdata/.gitkeep +/tests/testdata/email-testdata/* # Email demo -/tools/gmail/client_secret.json -/tools/gmail/token.json +/tools/mail/client_secret.json +/tools/mail/token.json *_dump/ +fwts/ # Monty Python demo /examples/testdata/MP diff --git a/AGENTS.md b/AGENTS.md index 148180fb..6e391b01 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ 
-9,9 +9,14 @@ that make changes to the repository. Not even `git add`** When moving, copying or deleting files, use the git commands: `git mv`, `git cp`, `git rm` +When I ask to update AGENTS.md (even if maybe) extract a general rule from what I said +before and update AGENTS.md (unless it's already in there -- maybe reformulate since +it apparently didn't work). Also, when it looks like I state a general rule, add it to +AGENTS.md. In all cases show what you added to AGENTS.md. + - Don't use '!' on the command line, it's some bash magic (even inside single quotes) -- Activate `.venv`: `make venv; source .venv/bin/activate` (run this only once) -- To get API keys in ad-hoc code, call `typeagent.aitools.utils.load_dotenv()` +- When running 'make' commands, do not use the venv (the Makefile uses 'uv run') +- To get API keys in ad-hoc code, call `load_dotenv()` - Use `pytest test` to run tests in test/ - Use `pyright` to check type annotations in src/, tools/, tests/, examples/ - Ignore build/, dist/ @@ -20,6 +25,8 @@ When moving, copying or deleting files, use the git commands: `git mv`, `git cp` - Use `make test` to run all tests - Use `make check test` to run `make check` and if it passes also run `make test` - Use `make format` to format all files using `black`. Do this before reporting success. +- When validating changes, first run `pytest` only on new/modified test files, then run `make format check test` once at the end. +- Keep ad-hoc and performance benchmarks under `tools/`, not `tests/`, so `make test` does not run them. ## Package Management with uv @@ -30,7 +37,7 @@ When moving, copying or deleting files, use the git commands: `git mv`, `git cp` - uv maintains consistency between `pyproject.toml`, `uv.lock`, and installed packages - Trust uv's automatic version resolution and file management -**IMPORTANT! YOU ARE NOT DONE UNTIL `make check test format` PASSES** +**IMPORTANT! 
YOU ARE NOT DONE UNTIL `make format check test` PASSES** # Code generation @@ -93,3 +100,6 @@ please follow these guidelines: * **Code Validation**: Don't use `py_compile` for syntax checking. Use `pyright` or `make check` instead for proper type checking and validation. + +* **Deprecations**: Don't deprecate things -- just delete them and fix the usage sites. + Don't create backward compatibility APIs or exports or whatever. Fix the usage sites. diff --git a/CHANGELOG.md b/CHANGELOG.md index 91c425d3..65be820e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,71 @@ # Typeagent Change Log +## 2026 + +### 0.4.0 (March 3) + +Lots of improvements; the highlights are provider-agnostic model +configuration backed by pydantic_ai, email ingestion, and a major +infrastructure overhaul. + +#### Core typeagent package +- Fixed a number of bugs that affected the core query algorithms +- Provider-agnostic chat and embedding model configuration via new + `model_adapters` module backed by pydantic_ai (#200): + - Use `provider:model` spec strings, + e.g. `create_chat_model("openai:gpt-4o")`. + - Replace `AsyncEmbeddingModel` with `IEmbedder`/`IEmbeddingModel` + protocols and `CachingEmbeddingModel`. + - Add `OPENAI_MODEL` and `OPENAI_EMBEDDING_MODEL` envvars + to override the default chat and embedding models. +- Split `interfaces.py` into separate modules + (`interfaces_core`, `_indexes`, `_search`, `_serialization`, + `_storage`) (Bernhard Merkle, #118). +- Make remaining storage-provider APIs async + (`get/set_conversation_metadata`, `is_source_ingested`, etc.) (#196). +- Fix listeners/recipients confusion in podcast metadata serialization (#174). +- Implement `SqliteRelatedTermsIndex.serialize()` (Rajiv Singh, #115). + +#### Email +- New _tools/ingest_email.py_ tool to ingest email + into a SQLite-backed conversation database (#111). +- Add _tools/mail/outlook_dump.py_ to dump Outlook/Microsoft 365 email + via the Graph API (Bernhard Merkle, #199). 
+- Add _tools/mail/mbox_dump.py_ to convert mbox files for ingestion + (Bernhard Merkle, #198). +- Consolidate mail dump tools under _tools/mail/_ with shared + _README.md_ (Bernhard Merkle). +- Various ergonomic improvements and fixes (#162, #168, #170). + +#### Tools +- Add conversation history to _tools/query.py_ for + pronoun/reference resolution across multi-turn queries + (Rajiv Singh, #117). +- Add _tools/load_json.py_ to load JSON-serialized index data + into a SQLite database; remove `--podcast` flag from + _tools/query.py_ (#164). + +#### Docs +- Improve docs for Azure env vars (#175). +- Add AgentCon 2026 presentation and demo videos + (Bernhard Merkle, #194, #202). +- VS Code / Pyright plugin setup instructions + (Bernhard Merkle, #150). + +#### Infrastructure +- Changes pyproject.toml to use uv more idiomatically. + - Local devs write `uv sync` instead of `uv sync --extra dev`. + - From PyPI (with uv or pip) you can use `typeagent[dev]` + to install the dev dependencies with the package. +- Move _typeagent/_ to _src/typeagent/_ (Bernhard Merkle, #139). +- Move tests and test data to _tests/_ directory (Bernhard Merkle, #144). +- Move ancillary dirs into subdirs (Bernhard Merkle, #145). +- Introduce `isort` for import sorting. +- Make pyright error on unused variables and imports (#129). +- Add readline support on Windows (#152, Bernhard Merkle). +- Enhance release script to update _uv.lock_ and create release PR + (Rajiv Singh, #169). 
+ ## 2025 ### 0.3.3 (Nov 25) diff --git a/Makefile b/Makefile index e8b7643e..c2aeb33b 100644 --- a/Makefile +++ b/Makefile @@ -8,43 +8,49 @@ all: venv format check test build .PHONY: format format: venv - .venv/bin/isort src tests tools examples $(FLAGS) - .venv/bin/black -tpy312 -tpy313 -tpy314 src tests tools examples $(FLAGS) + uv run isort src tests tools examples $(FLAGS) + uv run black -tpy312 src tests tools examples $(FLAGS) .PHONY: check check: venv - .venv/bin/pyright --pythonpath .venv/bin/python src tests tools examples + uv run pyright src tests tools examples .PHONY: test test: venv - .venv/bin/pytest $(FLAGS) + uv run pytest $(FLAGS) .PHONY: coverage coverage: venv coverage erase - COVERAGE_PROCESS_START=.coveragerc .venv/bin/coverage run -m pytest $(FLAGS) + COVERAGE_PROCESS_START=.coveragerc uv run coverage run -m pytest $(FLAGS) coverage combine coverage report .PHONY: demo demo: venv - .venv/bin/python -m tools.query $(FLAGS) + uv run python -m tools.query $(FLAGS) .PHONY: compare compare: venv - .venv/bin/python -m tools.query --batch $(FLAGS) + uv run python -m tools.query --batch $(FLAGS) + +.PHONY: eval +eval: venv + rm -f eval.db + uv run python tools/load_json.py --database eval.db tests/testdata/Episode_53_AdrianTchaikovsky_index + uv run python tools/query.py --batch --database eval.db --answer-results tests/testdata/Episode_53_Answer_results.json --search-results tests/testdata/Episode_53_Search_results.json $(FLAGS) .PHONY: mcp mcp: venv - .venv/bin/mcp dev src/typeagent/mcp/server.py + uv run mcp dev src/typeagent/mcp/server.py .PHONY: profile profile: venv - +# Example: +# python tools/ingest_email.py -d ./data/mail.sqlite ./mail_dump +``` + +This creates (or updates) a database that the query tool can search against. 
+ +## Step 3: Query + +Start an interactive query session against the ingested email database: + +```bash +python tools/query.py +``` + +You can ask natural-language questions about your emails (senders, topics, +dates, content, etc.) and the system will use Structured RAG to retrieve +relevant results. \ No newline at end of file diff --git a/docs/videos/py-demo-1.mp4 b/docs/videos/py-demo-1.mp4 new file mode 100644 index 00000000..c6664193 Binary files /dev/null and b/docs/videos/py-demo-1.mp4 differ diff --git a/docs/videos/py-demo-monty.mp4 b/docs/videos/py-demo-monty.mp4 new file mode 100644 index 00000000..c6375138 Binary files /dev/null and b/docs/videos/py-demo-monty.mp4 differ diff --git a/docs/videos/py-demo-python.mp4 b/docs/videos/py-demo-python.mp4 new file mode 100644 index 00000000..2291a6d6 Binary files /dev/null and b/docs/videos/py-demo-python.mp4 differ diff --git a/docs/videos/ts-demo-1.mp4 b/docs/videos/ts-demo-1.mp4 new file mode 100644 index 00000000..777a2502 Binary files /dev/null and b/docs/videos/ts-demo-1.mp4 differ diff --git a/docs/videos/ts-demo-2.mp4 b/docs/videos/ts-demo-2.mp4 new file mode 100644 index 00000000..02622df1 Binary files /dev/null and b/docs/videos/ts-demo-2.mp4 differ diff --git a/examples/demo/ingest.py b/examples/demo/ingest.py index 2f5730ce..e73d09a9 100644 --- a/examples/demo/ingest.py +++ b/examples/demo/ingest.py @@ -1,6 +1,10 @@ +from dotenv import load_dotenv + from typeagent import create_conversation from typeagent.transcripts.transcript import TranscriptMessage, TranscriptMessageMeta +load_dotenv() + def read_messages(filename) -> list[TranscriptMessage]: messages: list[TranscriptMessage] = [] diff --git a/examples/demo/query.py b/examples/demo/query.py index b28a7f02..3d2e2cc6 100644 --- a/examples/demo/query.py +++ b/examples/demo/query.py @@ -1,6 +1,10 @@ +from dotenv import load_dotenv + from typeagent import create_conversation from typeagent.transcripts.transcript import TranscriptMessage 
+load_dotenv() + async def main(): conversation = await create_conversation("demo.db", TranscriptMessage) diff --git a/examples/simple_query_demo.py b/examples/simple_query_demo.py index cbf54edd..c71eb30e 100644 --- a/examples/simple_query_demo.py +++ b/examples/simple_query_demo.py @@ -13,8 +13,9 @@ import asyncio +from dotenv import load_dotenv + from typeagent import create_conversation -from typeagent.aitools.utils import load_dotenv from typeagent.transcripts.transcript import TranscriptMessage, TranscriptMessageMeta diff --git a/make.bat b/make.bat index c1bd8517..2a2c85a6 100644 --- a/make.bat +++ b/make.bat @@ -13,6 +13,7 @@ if "%~1"=="" goto help if /I "%~1"=="format" goto format if /I "%~1"=="check" goto check if /I "%~1"=="test" goto test +if /I "%~1"=="coverage" goto coverage if /I "%~1"=="demo" goto demo if /I "%~1"=="build" goto build if /I "%~1"=="venv" goto venv @@ -27,26 +28,39 @@ goto help :format if not exist ".venv\" call make.bat venv echo Formatting code... -.venv\Scripts\isort src tests tools examples -.venv\Scripts\black src tests tools examples +uv run isort src tests tools examples +uv run black -tpy312 src tests tools examples goto end :check if not exist ".venv\" call make.bat venv echo Running type checks... -.venv\Scripts\pyright --pythonpath .venv\Scripts\python src tests tools examples +uv run pyright src tests tools examples goto end :test if not exist ".venv\" call make.bat venv echo Running unit tests... -.venv\Scripts\python -m pytest +uv run pytest goto end +:coverage +setlocal +if not exist ".venv\" call make.bat venv +echo Running test coverage... +uv run coverage erase +set "COVERAGE_PROCESS_START=.coveragerc" +uv run coverage run -m pytest +uv run coverage combine +uv run coverage report +endlocal +goto end + + :demo if not exist ".venv\" call make.bat venv echo Running query tool... 
-.venv\Scripts\python -m tools.query +uv run python -m tools.query goto end :build @@ -58,10 +72,10 @@ goto end :venv echo Creating virtual environment... uv sync -q -.venv\Scripts\python --version -.venv\Scripts\black --version -.venv\Scripts\pyright --version -.venv\Scripts\python -m pytest --version +uv run python --version +uv run black --version +uv run pyright --version +uv run pytest --version goto end :sync @@ -88,7 +102,7 @@ if exist .pytest_cache rmdir /s /q .pytest_cache goto end :help -echo Usage: .\make [format^|check^|test^|build^|venv^|sync^|install-uv^|clean^|help] +echo Usage: .\make [format^|check^|test^|coverage^|demo^|build^|venv^|sync^|install-uv^|clean^|help] goto end :end diff --git a/pyproject.toml b/pyproject.toml index 848a497d..99371ae7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,10 @@ [build-system] -requires = ["uv_build>=0.9.10,<0.10.0"] +requires = ["uv_build>=0.9.10,<0.11.0"] build-backend = "uv_build" [project] name = "typeagent" -version = "0.3.3" +version = "0.4.0.dev" description = "Incremental message indexing and querying pipelines using Structured RAG" readme = { file = "README.md", content-type = "text/markdown" } license = "MIT" @@ -30,13 +30,14 @@ classifiers = [ ] dependencies = [ "azure-identity>=1.22.0", - "black>=25.12.0", "colorama>=0.4.6", "mcp[cli]>=1.12.1", "numpy>=2.2.6", "openai>=1.81.0", "pydantic>=2.11.4", + "pydantic-ai-slim[openai]>=1.39.0", "pyreadline3>=3.5.4 ; sys_platform == 'win32'", + "pyright>=1.1.409", "python-dotenv>=1.1.0", "tiktoken>=0.12.0", "typechat>=0.0.4", @@ -80,14 +81,15 @@ known_local_folder = ["conftest"] dev = [ "azure-mgmt-authorization>=4.0.0", "azure-mgmt-keyvault>=12.1.1", + "black>=25.12.0", "coverage[toml]>=7.9.1", "google-api-python-client>=2.184.0", "google-auth-httplib2>=0.2.0", "google-auth-oauthlib>=1.2.2", "isort>=7.0.0", "logfire>=4.1.0", # So 'make check' passes + "msgraph-sdk>=1.54.0", "opentelemetry-instrumentation-httpx>=0.57b0", - 
"pydantic-ai-slim[openai]>=1.39.0", "pyright>=1.1.408", # 407 has a regression "pytest>=8.3.5", "pytest-asyncio>=0.26.0", diff --git a/src/typeagent/aitools/embeddings.py b/src/typeagent/aitools/embeddings.py index 8f0dae0d..8b579df2 100644 --- a/src/typeagent/aitools/embeddings.py +++ b/src/typeagent/aitools/embeddings.py @@ -1,305 +1,123 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -import asyncio -import os +from typing import Protocol, runtime_checkable import numpy as np from numpy.typing import NDArray -from openai import AsyncAzureOpenAI, AsyncOpenAI, DEFAULT_MAX_RETRIES, OpenAIError -from openai.types import Embedding -import tiktoken -from tiktoken import model as tiktoken_model -from tiktoken.core import Encoding - -from .auth import AzureTokenProvider, get_shared_token_provider -from .utils import timelog - type NormalizedEmbedding = NDArray[np.float32] # A single embedding type NormalizedEmbeddings = NDArray[np.float32] # An array of embeddings -DEFAULT_MODEL_NAME = "text-embedding-ada-002" -DEFAULT_EMBEDDING_SIZE = 1536 # Default embedding size (required for ada-002) -DEFAULT_ENVVAR = "AZURE_OPENAI_ENDPOINT_EMBEDDING" -TEST_MODEL_NAME = "test" -MAX_BATCH_SIZE = 2048 -MAX_TOKEN_SIZE = 4096 -MAX_TOKENS_PER_BATCH = 300_000 -MAX_CHAR_SIZE = MAX_TOKEN_SIZE * 3 -MAX_CHARS_PER_BATCH = MAX_TOKENS_PER_BATCH * 3 - -model_to_embedding_size_and_envvar: dict[str, tuple[int | None, str]] = { - DEFAULT_MODEL_NAME: (DEFAULT_EMBEDDING_SIZE, DEFAULT_ENVVAR), - "text-embedding-3-small": (1536, "AZURE_OPENAI_ENDPOINT_EMBEDDING_3_SMALL"), - "text-embedding-3-large": (3072, "AZURE_OPENAI_ENDPOINT_EMBEDDING_3_LARGE"), - # For testing only, not a real model (insert real embeddings above) - TEST_MODEL_NAME: (3, "SIR_NOT_APPEARING_IN_THIS_FILM"), -} +@runtime_checkable +class IEmbedder(Protocol): + """Minimal provider interface for embedding models. + + Implement this protocol to add support for a new embedding provider + (e.g. 
Anthropic, Gemini, local models). Only raw embedding computation + is required; caching is handled by :class:`CachingEmbeddingModel`. + + The production implementation is + :class:`~typeagent.aitools.model_adapters.PydanticAIEmbedder`. + """ + + @property + def model_name(self) -> str: ... + + async def get_embedding_nocache(self, input: str) -> NormalizedEmbedding: + """Compute a single embedding without caching.""" + ... + + async def get_embeddings_nocache(self, input: list[str]) -> NormalizedEmbeddings: + """Compute embeddings for a batch of strings without caching. + + Raises :class:`ValueError` if *input* is empty. + """ + ... + + +@runtime_checkable +class IEmbeddingModel(Protocol): + """Consumer-facing interface for embedding models with caching. + + This extends the provider interface (:class:`IEmbedder`) with caching + methods. Use :class:`CachingEmbeddingModel` to wrap an :class:`IEmbedder` + and get a ready-to-use ``IEmbeddingModel``. + """ + + @property + def model_name(self) -> str: ... + + def add_embedding(self, key: str, embedding: NormalizedEmbedding) -> None: + """Cache an already-computed embedding under the given key.""" + ... + + async def get_embedding_nocache(self, input: str) -> NormalizedEmbedding: + """Compute a single embedding without caching.""" + ... + + async def get_embeddings_nocache(self, input: list[str]) -> NormalizedEmbeddings: + """Compute embeddings for a batch of strings without caching.""" + ... + + async def get_embedding(self, key: str) -> NormalizedEmbedding: + """Retrieve a single embedding, using cache if available.""" + ... + + async def get_embeddings(self, keys: list[str]) -> NormalizedEmbeddings: + """Retrieve embeddings for multiple keys, using cache if available.""" + ... 
-class AsyncEmbeddingModel: - model_name: str - embedding_size: int - endpoint_envvar: str - azure_token_provider: AzureTokenProvider | None - async_client: AsyncOpenAI | None - azure_endpoint: str - azure_api_version: str - encoding: Encoding | None - max_chunk_size: int - max_size_per_batch: int - - _embedding_cache: dict[str, NormalizedEmbedding] - - def __init__( - self, - embedding_size: int | None = None, - model_name: str | None = None, - endpoint_envvar: str | None = None, - max_retries: int = DEFAULT_MAX_RETRIES, - ): - if model_name is None: - model_name = DEFAULT_MODEL_NAME - self.model_name = model_name - - suggested_embedding_size, suggested_endpoint_envvar = ( - model_to_embedding_size_and_envvar.get(model_name, (None, None)) - ) - - if embedding_size is None: - if suggested_embedding_size is not None: - embedding_size = suggested_embedding_size - else: - embedding_size = DEFAULT_EMBEDDING_SIZE - self.embedding_size = embedding_size - - if ( - model_name == DEFAULT_MODEL_NAME - and embedding_size != DEFAULT_EMBEDDING_SIZE - ): - raise ValueError( - f"Cannot customize embedding_size for default model {DEFAULT_MODEL_NAME}" - ) - - if endpoint_envvar is None: - if suggested_endpoint_envvar is not None: - endpoint_envvar = suggested_endpoint_envvar - else: - endpoint_envvar = DEFAULT_ENVVAR - self.endpoint_envvar = endpoint_envvar - - self.azure_token_provider = None - - if self.model_name == TEST_MODEL_NAME: - self.async_client = None - else: - openai_key_name = "OPENAI_API_KEY" - azure_key_name = "AZURE_OPENAI_API_KEY" - if openai_key := os.getenv(openai_key_name): - endpoint = os.getenv(self.endpoint_envvar) - with timelog(f"Using OpenAI"): - self.async_client = AsyncOpenAI( - base_url=endpoint, api_key=openai_key, max_retries=max_retries - ) - elif azure_api_key := os.getenv(azure_key_name): - with timelog("Using Azure OpenAI"): - self._setup_azure(azure_api_key) - else: - raise ValueError( - f"Neither {openai_key_name} nor {azure_key_name} found in 
environment." - ) - - if self.model_name in tiktoken_model.MODEL_TO_ENCODING: - encoding_name = tiktoken.encoding_name_for_model(self.model_name) - self.encoding = tiktoken.get_encoding(encoding_name) - self.max_chunk_size = MAX_TOKEN_SIZE - self.max_size_per_batch = MAX_TOKENS_PER_BATCH - else: - self.encoding = None - self.max_chunk_size = MAX_CHAR_SIZE - self.max_size_per_batch = MAX_CHARS_PER_BATCH - - self._embedding_cache = {} - - def _setup_azure(self, azure_api_key: str) -> None: - from .utils import get_azure_api_key, parse_azure_endpoint - - azure_api_key = get_azure_api_key(azure_api_key) - self.azure_endpoint, self.azure_api_version = parse_azure_endpoint( - self.endpoint_envvar - ) - - if azure_api_key != os.getenv("AZURE_OPENAI_API_KEY"): - # If we got a token from identity, store the provider for refresh - self.azure_token_provider = get_shared_token_provider() - - self.async_client = AsyncAzureOpenAI( - api_version=self.azure_api_version, - azure_endpoint=self.azure_endpoint, - api_key=azure_api_key, - ) - - async def refresh_auth(self): - """Update client when using a token provider and it's nearly expired.""" - # refresh_token is synchronous and slow -- run it in a separate thread - assert self.azure_token_provider - refresh_token = self.azure_token_provider.refresh_token - loop = asyncio.get_running_loop() - azure_api_key = await loop.run_in_executor(None, refresh_token) - assert self.azure_api_version - assert self.azure_endpoint - self.async_client = AsyncAzureOpenAI( - api_version=self.azure_api_version, - azure_endpoint=self.azure_endpoint, - api_key=azure_api_key, - ) +class CachingEmbeddingModel: + """Wraps an :class:`IEmbedder` with an in-memory embedding cache. + + This shared base class implements the caching logic once, so individual + embedding providers only need to implement the minimal :class:`IEmbedder` + protocol (``get_embedding_nocache`` / ``get_embeddings_nocache``). 
+ """ + + def __init__(self, embedder: IEmbedder) -> None: + self._embedder = embedder + self._cache: dict[str, NormalizedEmbedding] = {} + + @property + def model_name(self) -> str: + return self._embedder.model_name def add_embedding(self, key: str, embedding: NormalizedEmbedding) -> None: - existing = self._embedding_cache.get(key) - if existing is not None: - assert np.array_equal(existing, embedding) - else: - self._embedding_cache[key] = embedding + self._cache[key] = embedding async def get_embedding_nocache(self, input: str) -> NormalizedEmbedding: - embeddings = await self.get_embeddings_nocache([input]) - return embeddings[0] + return await self._embedder.get_embedding_nocache(input) async def get_embeddings_nocache(self, input: list[str]) -> NormalizedEmbeddings: - if not input: - empty = np.array([], dtype=np.float32) - empty.shape = (0, self.embedding_size) - return empty - if self.azure_token_provider and self.azure_token_provider.needs_refresh(): - await self.refresh_auth() - extra_args = {} - if self.model_name != DEFAULT_MODEL_NAME: - extra_args["dimensions"] = self.embedding_size - if self.async_client is None: - # Compute a random embedding for testing purposes. 
- - def hashish(s: str) -> int: - # Primitive deterministic hash function (hash() varies per run) - h = 0 - for ch in s: - h = (h * 31 + ord(ch)) & 0xFFFFFFFF - return h - - prime = 1961 - fake_data: list[NormalizedEmbedding] = [] - for item in input: - if not item: - raise OpenAIError - length = len(item) - floats = [] - for i in range(self.embedding_size): - cut = i % length - scrambled = item[cut:] + item[:cut] - hashed = hashish(scrambled) - reduced = (hashed % prime) / prime - floats.append(reduced) - array = np.array(floats, dtype=np.float64) - normalized = array / np.sqrt(np.dot(array, array)) - dot = np.dot(normalized, normalized) - assert ( - abs(dot - 1.0) < 1e-15 - ), f"Embedding {normalized} is not normalized: {dot}" - fake_data.append(normalized) - assert len(fake_data) == len(input), (len(fake_data), "!=", len(input)) - result = np.array(fake_data, dtype=np.float32) - return result - else: - batches: list[list[str]] = [] - batch: list[str] = [] - batch_sum: int = 0 - for sentence in input: - truncated_input, truncated_input_size = await self.truncate_input( - sentence - ) - if ( - len(batch) >= MAX_BATCH_SIZE - or batch_sum + truncated_input_size > self.max_size_per_batch - ): - batches.append(batch) - batch = [] - batch_sum = 0 - batch.append(truncated_input) - batch_sum += truncated_input_size - if batch: - batches.append(batch) - - data: list[Embedding] = [] - for batch in batches: - embeddings_data = ( - await self.async_client.embeddings.create( - input=batch, - model=self.model_name, - encoding_format="float", - **extra_args, - ) - ).data - data.extend(embeddings_data) - - assert len(data) == len(input), (len(data), "!=", len(input)) - return np.array([d.embedding for d in data], dtype=np.float32) + return await self._embedder.get_embeddings_nocache(input) async def get_embedding(self, key: str) -> NormalizedEmbedding: - """Retrieve an embedding, using the cache.""" - if key in self._embedding_cache: - return self._embedding_cache[key] - 
embedding = await self.get_embedding_nocache(key) - self._embedding_cache[key] = embedding + cached = self._cache.get(key) + if cached is not None: + return cached + embedding = await self._embedder.get_embedding_nocache(key) + self._cache[key] = embedding return embedding async def get_embeddings(self, keys: list[str]) -> NormalizedEmbeddings: - """Retrieve embeddings for multiple keys, using the cache.""" - embeddings: list[NormalizedEmbedding | None] = [] - missing_keys: list[str] = [] - - # Collect cached embeddings and identify missing keys - for key in keys: - if key in self._embedding_cache: - embeddings.append(self._embedding_cache[key]) - else: - embeddings.append(None) # Placeholder for missing keys - missing_keys.append(key) - - # Retrieve embeddings for missing keys + if not keys: + raise ValueError("Cannot embed an empty list") + missing_keys = [k for k in keys if k not in self._cache] if missing_keys: - new_embeddings = await self.get_embeddings_nocache(missing_keys) - for key, embedding in zip(missing_keys, new_embeddings): - self._embedding_cache[key] = embedding - - # Replace placeholders with retrieved embeddings - for i, key in enumerate(keys): - if embeddings[i] is None: - embeddings[i] = self._embedding_cache[key] - return np.array(embeddings, dtype=np.float32).reshape( - (len(keys), self.embedding_size) - ) - - async def truncate_input(self, input: str) -> tuple[str, int]: - """Truncate input strings to fit within model limits. - - args: - input: The input string to truncate. - - returns: - A tuple of (truncated string, size after truncation). 
- """ - if self.encoding is None: - # Non-token-aware truncation - if len(input) > self.max_chunk_size: - return input[: self.max_chunk_size], self.max_chunk_size - else: - return input, len(input) - else: - # Token-aware truncation - tokens = self.encoding.encode(input) - if len(tokens) > self.max_chunk_size: - truncated_tokens = tokens[: self.max_chunk_size] - return self.encoding.decode(truncated_tokens), self.max_chunk_size - else: - return input, len(tokens) + fresh = await self._embedder.get_embeddings_nocache(missing_keys) + for i, k in enumerate(missing_keys): + self._cache[k] = fresh[i] + return np.array([self._cache[k] for k in keys], dtype=np.float32) + + +TEST_MODEL_NAME = "test" + +model_to_envvar: dict[str, str] = { + "text-embedding-ada-002": "AZURE_OPENAI_ENDPOINT_EMBEDDING", + "text-embedding-3-small": "AZURE_OPENAI_ENDPOINT_EMBEDDING_3_SMALL", + "text-embedding-3-large": "AZURE_OPENAI_ENDPOINT_EMBEDDING_3_LARGE", +} diff --git a/src/typeagent/aitools/model_adapters.py b/src/typeagent/aitools/model_adapters.py new file mode 100644 index 00000000..46208f6e --- /dev/null +++ b/src/typeagent/aitools/model_adapters.py @@ -0,0 +1,421 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Provider-agnostic model configuration backed by pydantic_ai. + +Create chat and embedding models from ``provider:model`` spec strings:: + + from typeagent.aitools.model_adapters import configure_models + + chat, embedder = configure_models( + "openai:gpt-4o", + "openai:text-embedding-3-small", + ) + +The spec format is ``provider:model``, matching pydantic_ai conventions. +Provider wiring (API keys, endpoints, etc.) is handled by pydantic_ai's +model registry, which supports 25+ providers including ``openai``, +``azure``, ``anthropic``, ``google``, ``bedrock``, ``groq``, ``mistral``, +``ollama``, ``cohere``, and many more. 
class PydanticAIChatModel(typechat.TypeChatLanguageModel):
    """Adapter exposing a :class:`pydantic_ai.models.Model` through
    TypeChat's :class:`~typechat.TypeChatLanguageModel` protocol, so any
    pydantic_ai chat backend (OpenAI, Anthropic, Google, ...) can serve
    wherever TypeChat expects a language model.
    """

    def __init__(self, model: Model) -> None:
        self._model = model

    async def complete(
        self, prompt: str | list[typechat.PromptSection]
    ) -> typechat.Result[str]:
        """Run one request and return the concatenated text output.

        Args:
            prompt: A plain user prompt, or a list of TypeChat prompt
                sections with "system"/"user" roles.

        Returns:
            ``Success`` with the joined text parts, or ``Failure`` when
            the response contains no text parts at all.
        """
        # Normalize the prompt into pydantic_ai request parts.
        if isinstance(prompt, str):
            parts: list[SystemPromptPart | UserPromptPart] = [
                UserPromptPart(content=prompt)
            ]
        else:
            parts = [
                (
                    SystemPromptPart(content=section["content"])
                    if section["role"] == "system"
                    else UserPromptPart(content=section["content"])
                )
                for section in prompt
            ]

        messages: list[ModelMessage] = [ModelRequest(parts=parts)]
        response = await self._model.request(
            messages, None, ModelRequestParameters()
        )
        text_parts = [p.content for p in response.parts if isinstance(p, TextPart)]
        if text_parts:
            return typechat.Success("".join(text_parts))
        return typechat.Failure("No text content in model response")


class PydanticAIEmbedder:
    """Adapter from :class:`pydantic_ai.Embedder` to :class:`IEmbedder`.

    Lets any pydantic_ai embedding provider (OpenAI, Cohere, Google, ...)
    be used where the codebase expects an ``IEmbedder``. Wrap in
    :class:`~typeagent.aitools.embeddings.CachingEmbeddingModel` for a
    ready-to-use ``IEmbeddingModel`` with caching.
    """

    # Public name of the underlying embedding model/deployment.
    model_name: str

    def __init__(
        self,
        embedder: _PydanticAIEmbedder,
        model_name: str,
    ) -> None:
        self._embedder = embedder
        self.model_name = model_name

    async def get_embedding_nocache(self, input: str) -> NormalizedEmbedding:
        """Embed one string (no caching) as a unit-norm float32 vector.

        Delegates to the batch path so single- and batch-embedding share
        one normalization code path (previously duplicated).
        """
        return (await self.get_embeddings_nocache([input]))[0]

    async def get_embeddings_nocache(self, input: list[str]) -> NormalizedEmbeddings:
        """Embed a batch of strings (no caching) as a 2-D float32 array
        with unit-norm rows.

        Raises:
            ValueError: If *input* is empty.
        """
        if not input:
            raise ValueError("Cannot embed an empty list")
        result = await self._embedder.embed_documents(input)
        embeddings: NDArray[np.float32] = np.array(result.embeddings, dtype=np.float32)
        # Row-normalize; a zero vector is left as-is (divided by 1).
        norms = np.linalg.norm(embeddings, axis=1, keepdims=True).astype(np.float32)
        norms = np.where(norms > 0, norms, np.float32(1.0))
        return (embeddings / norms).astype(np.float32)
+ """ + + model_name: str + + def __init__( + self, + embedder: _PydanticAIEmbedder, + model_name: str, + ) -> None: + self._embedder = embedder + self.model_name = model_name + + async def get_embedding_nocache(self, input: str) -> NormalizedEmbedding: + result = await self._embedder.embed_documents([input]) + embedding: NDArray[np.float32] = np.array( + result.embeddings[0], dtype=np.float32 + ) + norm = float(np.linalg.norm(embedding)) + if norm > 0: + embedding = (embedding / norm).astype(np.float32) + return embedding + + async def get_embeddings_nocache(self, input: list[str]) -> NormalizedEmbeddings: + if not input: + raise ValueError("Cannot embed an empty list") + result = await self._embedder.embed_documents(input) + embeddings: NDArray[np.float32] = np.array(result.embeddings, dtype=np.float32) + norms = np.linalg.norm(embeddings, axis=1, keepdims=True).astype(np.float32) + norms = np.where(norms > 0, norms, np.float32(1.0)) + embeddings = (embeddings / norms).astype(np.float32) + return embeddings + + +# --------------------------------------------------------------------------- +# Provider auto-detection +# --------------------------------------------------------------------------- + + +def _needs_azure_fallback(provider: str) -> bool: + """Return True if *provider* is ``openai`` but only Azure credentials exist.""" + return ( + provider == "openai" + and not os.getenv("OPENAI_API_KEY") + and bool(os.getenv("AZURE_OPENAI_API_KEY")) + ) + + +def _make_azure_provider( + endpoint_envvar: str = "AZURE_OPENAI_ENDPOINT", + api_key_envvar: str = "AZURE_OPENAI_API_KEY", +): + """Create a :class:`pydantic_ai.providers.azure.AzureProvider`. + + Constructs an ``AsyncAzureOpenAI`` client from the given environment + variables and wraps it in an ``AzureProvider``. The endpoint env-var + may contain a full Azure deployment URL (including path and + ``api-version`` query parameter) — the same format used throughout + this codebase. 
+ + When ``AZURE_OPENAI_API_KEY`` is set to ``"identity"``, the client + uses Azure Managed Identity via a token provider callback, which + refreshes tokens automatically before each request. + """ + from openai import AsyncAzureOpenAI + from pydantic_ai.providers.azure import AzureProvider + + from .utils import parse_azure_endpoint + + raw_key = os.environ[api_key_envvar] + azure_endpoint, api_version = parse_azure_endpoint(endpoint_envvar) + + if raw_key.lower() == "identity": + from .auth import get_shared_token_provider + + token_provider = get_shared_token_provider() + client = AsyncAzureOpenAI( + azure_endpoint=azure_endpoint, + api_version=api_version, + azure_ad_token_provider=token_provider.get_token, + max_retries=5, + ) + else: + apim_key = os.getenv("AZURE_APIM_SUBSCRIPTION_KEY") + client = AsyncAzureOpenAI( + azure_endpoint=azure_endpoint, + api_version=api_version, + api_key=raw_key, + default_headers=( + {"Ocp-Apim-Subscription-Key": apim_key} if apim_key else None + ), + max_retries=5, + ) + return AzureProvider(openai_client=client) + + +# --------------------------------------------------------------------------- +# Public API +# --------------------------------------------------------------------------- + + +DEFAULT_CHAT_SPEC = "openai:gpt-4o" + + +def create_chat_model( + model_spec: str | None = None, +) -> PydanticAIChatModel: + """Create a chat model from a ``provider:model`` spec. + + Delegates to :func:`pydantic_ai.models.infer_model` for provider wiring. + If the spec uses ``openai:`` and ``OPENAI_API_KEY`` is not set but + ``AZURE_OPENAI_API_KEY`` is, Azure OpenAI is used automatically. + + If *model_spec* is ``None``, it is constructed from the ``OPENAI_MODEL`` + environment variable (falling back to :data:`DEFAULT_CHAT_SPEC`). 
DEFAULT_EMBEDDING_SPEC = "openai:text-embedding-ada-002"


def create_embedding_model(
    model_spec: str | None = None,
) -> CachingEmbeddingModel:
    """Create an embedding model from a ``provider:model`` spec.

    Provider wiring is delegated to :class:`pydantic_ai.Embedder`. When
    the spec uses ``openai:`` and ``OPENAI_API_KEY`` is absent but
    ``AZURE_OPENAI_API_KEY`` is set, Azure OpenAI is used instead.

    If *model_spec* is ``None``, it is derived from the
    ``OPENAI_EMBEDDING_MODEL`` environment variable, defaulting to
    :data:`DEFAULT_EMBEDDING_SPEC`.

    Returns:
        A :class:`~typeagent.aitools.embeddings.CachingEmbeddingModel`
        wrapping a :class:`PydanticAIEmbedder`.

    Examples::

        model = create_embedding_model()  # OPENAI_EMBEDDING_MODEL or ada-002
        model = create_embedding_model("openai:text-embedding-3-small")
        model = create_embedding_model("cohere:embed-english-v3.0")
        model = create_embedding_model("google:text-embedding-004")
    """
    if model_spec is None:
        env_model = os.getenv("OPENAI_EMBEDDING_MODEL")
        model_spec = f"openai:{env_model}" if env_model else DEFAULT_EMBEDDING_SPEC
    provider, _, model_name = model_spec.partition(":")
    if not model_name:
        # Spec had no colon: the whole spec is the model name.
        model_name = provider
    if not _needs_azure_fallback(provider):
        embedder = _PydanticAIEmbedder(model_spec)
    else:
        from pydantic_ai.embeddings.openai import OpenAIEmbeddingModel

        from .embeddings import model_to_envvar
        from .utils import parse_azure_endpoint_parts

        # Prefer a model-specific Azure endpoint, else the generic one.
        candidate = model_to_envvar.get(model_name)
        if candidate and os.getenv(candidate):
            endpoint_envvar = candidate
        else:
            endpoint_envvar = "AZURE_OPENAI_ENDPOINT_EMBEDDING"
        # Prefer a model-specific API key, else the generic one.
        api_key_envvar = "AZURE_OPENAI_API_KEY_EMBEDDING"
        if not os.getenv(api_key_envvar):
            api_key_envvar = "AZURE_OPENAI_API_KEY"

        azure_provider = _make_azure_provider(endpoint_envvar, api_key_envvar)
        _, _, deployment_name = parse_azure_endpoint_parts(endpoint_envvar)
        embedder = _PydanticAIEmbedder(
            OpenAIEmbeddingModel(
                deployment_name or model_name,
                provider=azure_provider,
            )
        )
    return CachingEmbeddingModel(PydanticAIEmbedder(embedder, model_name))
+ api_key_envvar = "AZURE_OPENAI_API_KEY_EMBEDDING" + if not os.getenv(api_key_envvar): + api_key_envvar = "AZURE_OPENAI_API_KEY" + + azure_provider = _make_azure_provider(endpoint_envvar, api_key_envvar) + _, _, deployment_name = parse_azure_endpoint_parts(endpoint_envvar) + embedding_model = OpenAIEmbeddingModel( + deployment_name or model_name, + provider=azure_provider, + ) + embedder = _PydanticAIEmbedder(embedding_model) + else: + embedder = _PydanticAIEmbedder(model_spec) + return CachingEmbeddingModel(PydanticAIEmbedder(embedder, model_name)) + + +# --------------------------------------------------------------------------- +# Test helpers +# --------------------------------------------------------------------------- + + +def _hashish(s: str) -> int: + """Deterministic hash function for fake embeddings (hash() varies per run).""" + h = 0 + for ch in s: + h = (h * 31 + ord(ch)) & 0xFFFFFFFF + return h + + +def _compute_fake_embeddings( + input_texts: list[str], embedding_size: int +) -> list[list[float]]: + """Generate deterministic fake embeddings for testing (unnormalized). + + Raises :class:`ValueError` on empty input strings. + """ + prime = 1961 + result: list[list[float]] = [] + for item in input_texts: + if not item: + raise ValueError("Empty input text") + length = len(item) + floats: list[float] = [] + for i in range(embedding_size): + cut = i % length + scrambled = item[cut:] + item[:cut] + hashed = _hashish(scrambled) + reduced = (hashed % prime) / prime + floats.append(reduced) + result.append(floats) + return result + + +class _FakePydanticAIEmbeddingModel(_PydanticAIEmbeddingModelBase): + """A pydantic_ai :class:`EmbeddingModel` that returns deterministic fake + embeddings. 
class _FakePydanticAIEmbeddingModel(_PydanticAIEmbeddingModelBase):
    """A pydantic_ai :class:`EmbeddingModel` returning deterministic fake
    embeddings. For tests only — never makes a network call."""

    def __init__(self, embedding_size: int = 3) -> None:
        super().__init__()
        self._embedding_size = embedding_size

    @property
    def model_name(self) -> str:
        return "test"

    @property
    def system(self) -> str:
        return "test"

    async def embed(
        self,
        inputs: str | Sequence[str],
        *,
        input_type: EmbedInputType,
        settings: EmbeddingSettings | None = None,
    ) -> EmbeddingResult:
        """Return deterministic fake embeddings for *inputs*."""
        prepared_inputs, settings = self.prepare_embed(inputs, settings)
        return EmbeddingResult(
            embeddings=_compute_fake_embeddings(
                prepared_inputs, self._embedding_size
            ),
            inputs=prepared_inputs,
            input_type=input_type,
            model_name="test",
            provider_name="test",
        )


def create_test_embedding_model(
    embedding_size: int = 3,
) -> CachingEmbeddingModel:
    """Build a :class:`CachingEmbeddingModel` backed by deterministic fake
    embeddings — no API keys or network access required."""
    fake = _PydanticAIEmbedder(_FakePydanticAIEmbeddingModel(embedding_size))
    return CachingEmbeddingModel(PydanticAIEmbedder(fake, "test"))


def configure_models(
    chat_model_spec: str,
    embedding_model_spec: str,
) -> tuple[PydanticAIChatModel, CachingEmbeddingModel]:
    """Configure a chat model and an embedding model in one call.

    Provider wiring is delegated to pydantic_ai's model registry.

    Example::

        chat, embedder = configure_models(
            "openai:gpt-4o",
            "openai:text-embedding-3-small",
        )

        settings = ConversationSettings(model=embedder)
        extractor = KnowledgeExtractor(model=chat)
    """
    chat = create_chat_model(chat_model_spec)
    embedder = create_embedding_model(embedding_model_spec)
    return chat, embedder
line_width = min(200, shutil.get_terminal_size().columns) - formatted_text = black.format_str( - text, mode=black.Mode(line_length=line_width) - ).rstrip() - return reindent(formatted_text) + try: + return pprint.pformat(ast.literal_eval(text), width=line_width) + except (ValueError, SyntaxError): + # Fall back to simple pprint of the string itself if it's not a valid literal + return pprint.pformat(text, width=line_width) def reindent(text: str) -> str: @@ -78,21 +80,6 @@ def reindent(text: str) -> str: return "\n".join(reindented_lines) -def load_dotenv() -> None: - """Load environment variables from '/.env'.""" - # Look for ".env" in current directory and up until root. - cur_dir = os.path.abspath(os.getcwd()) - while True: - path = os.path.join(cur_dir, ".env") - if os.path.exists(path): - dotenv.load_dotenv(path) - return - parent_dir = os.path.dirname(cur_dir) - if parent_dir == cur_dir: - break # Reached filesystem root ('/'). - cur_dir = parent_dir - - def create_translator[T]( model: typechat.TypeChatLanguageModel, schema_class: type[T], @@ -203,17 +190,47 @@ def parse_azure_endpoint( Raises: RuntimeError: If endpoint is not found or doesn't contain api-version. """ + endpoint, version, _ = parse_azure_endpoint_parts(endpoint_envvar) + return endpoint, version + + +def parse_azure_endpoint_parts( + endpoint_envvar: str = "AZURE_OPENAI_ENDPOINT", +) -> tuple[str, str, str | None]: + """Parse Azure OpenAI endpoint, version, and optional deployment name. + + Returns: + Tuple of (endpoint_url, api_version, deployment_name). + + The deployment name is extracted from endpoints of the form + ``.../openai/deployments//...`` and is ``None`` otherwise. 
+ """ azure_endpoint = os.getenv(endpoint_envvar) if not azure_endpoint: raise RuntimeError(f"Environment variable {endpoint_envvar} not found") - m = re.search(r"[?,]api-version=([\d-]+(?:preview)?)", azure_endpoint) + m = re.search(r"[?&]api-version=([\d-]+(?:preview)?)", azure_endpoint) if not m: raise RuntimeError( f"{endpoint_envvar}={azure_endpoint} doesn't contain valid api-version field" ) - return azure_endpoint, m.group(1) + clean_endpoint = azure_endpoint.split("?", 1)[0] + deployment_match = re.search( + r"/openai/deployments/([^/?]+)(?:/.*)?$", + clean_endpoint, + ) + deployment_name = deployment_match.group(1) if deployment_match else None + + # Strip query string and /openai... path — AsyncAzureOpenAI expects a + # clean base URL and builds the deployment path internally. + clean_endpoint = re.sub( + r"/openai(?:/deployments/[^/?]+(?:/.*)?)?$", + "", + clean_endpoint, + ) + + return clean_endpoint, m.group(1), deployment_name def get_azure_api_key(azure_api_key: str) -> str: @@ -235,44 +252,13 @@ def get_azure_api_key(azure_api_key: str) -> str: return azure_api_key -def create_async_openai_client( +def resolve_azure_model_name( + model_name: str, endpoint_envvar: str = "AZURE_OPENAI_ENDPOINT", - base_url: str | None = None, -): - """Create AsyncOpenAI or AsyncAzureOpenAI client based on environment variables. - - Returns the appropriate async OpenAI client based on what credentials are available. - Prefers OPENAI_API_KEY over AZURE_OPENAI_API_KEY. - - Args: - endpoint_envvar: Environment variable name for Azure endpoint (default: AZURE_OPENAI_ENDPOINT). - base_url: Optional base URL override for OpenAI client. - - Returns: - AsyncOpenAI or AsyncAzureOpenAI client instance. - - Raises: - RuntimeError: If neither OPENAI_API_KEY nor AZURE_OPENAI_API_KEY is set. 
- """ - from openai import AsyncAzureOpenAI, AsyncOpenAI - - if openai_api_key := os.getenv("OPENAI_API_KEY"): - return AsyncOpenAI(api_key=openai_api_key, base_url=base_url) - - elif azure_api_key := os.getenv("AZURE_OPENAI_API_KEY"): - azure_api_key = get_azure_api_key(azure_api_key) - azure_endpoint, api_version = parse_azure_endpoint(endpoint_envvar) - - return AsyncAzureOpenAI( - api_version=api_version, - azure_endpoint=azure_endpoint, - api_key=azure_api_key, - ) - - else: - raise RuntimeError( - "Neither OPENAI_API_KEY nor AZURE_OPENAI_API_KEY was provided." - ) +) -> str: + """Resolve an Azure deployment name from an endpoint, if present.""" + _, _, deployment_name = parse_azure_endpoint_parts(endpoint_envvar) + return deployment_name or model_name # The true return type is pydantic_ai.Agent[T], but that's an optional dependency. @@ -280,7 +266,6 @@ def make_agent[T](cls: type[T]): """Create Pydantic AI agent using hardcoded preferences.""" from pydantic_ai import Agent, NativeOutput, ToolOutput from pydantic_ai.models.openai import OpenAIChatModel - from pydantic_ai.providers.azure import AzureProvider # Prefer straight OpenAI over Azure OpenAI. if os.getenv("OPENAI_API_KEY"): @@ -288,21 +273,16 @@ def make_agent[T](cls: type[T]): print(f"## Using OpenAI with {Wrapper.__name__} ##") model = OpenAIChatModel("gpt-4o") # Retrieves OPENAI_API_KEY again. 
- elif azure_api_key := os.getenv("AZURE_OPENAI_API_KEY"): - azure_api_key = get_azure_api_key(azure_api_key) - azure_endpoint, api_version = parse_azure_endpoint("AZURE_OPENAI_ENDPOINT") + elif os.getenv("AZURE_OPENAI_API_KEY"): + from typeagent.aitools.model_adapters import _make_azure_provider - print(f"## {azure_endpoint} ##") + azure_provider = _make_azure_provider() Wrapper = ToolOutput - print(f"## Using Azure {api_version} with {Wrapper.__name__} ##") + print(f"## Using Azure with {Wrapper.__name__} ##") model = OpenAIChatModel( - "gpt-4o", - provider=AzureProvider( - azure_endpoint=azure_endpoint, - api_version=api_version, - api_key=azure_api_key, - ), + resolve_azure_model_name("gpt-4o"), + provider=azure_provider, ) else: diff --git a/src/typeagent/aitools/vectorbase.py b/src/typeagent/aitools/vectorbase.py index 3bbc5729..e22083c8 100644 --- a/src/typeagent/aitools/vectorbase.py +++ b/src/typeagent/aitools/vectorbase.py @@ -1,14 +1,17 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
@dataclass
class TextEmbeddingIndexSettings:
    """Configuration for a text-embedding index."""

    embedding_model: IEmbeddingModel
    min_score: float  # similarity threshold in [0.0, 1.0]
    max_matches: int | None  # >= 1; None means no limit
    batch_size: int  # >= 1

    def __init__(
        self,
        embedding_model: IEmbeddingModel | None = None,
        min_score: float | None = None,
        max_matches: int | None = None,
        batch_size: int | None = None,
    ):
        # Absent or out-of-range arguments fall back to defaults.
        self.min_score = 0.85 if min_score is None else min_score
        self.max_matches = max_matches if (max_matches and max_matches >= 1) else None
        self.batch_size = batch_size if (batch_size and batch_size >= 1) else 8
        self.embedding_model = embedding_model or create_embedding_model()
TextEmbeddingIndexSettings): self.settings = settings self._model = settings.embedding_model - self._embedding_size = self._model.embedding_size + self._embedding_size = 0 self.clear() async def get_embedding(self, key: str, cache: bool = True) -> NormalizedEmbedding: @@ -88,6 +78,14 @@ def add_embedding( ) -> None: if isinstance(embedding, list): embedding = np.array(embedding, dtype=np.float32) + if self._embedding_size == 0: + self._set_embedding_size(len(embedding)) + self._vectors.shape = (0, self._embedding_size) + if len(embedding) != self._embedding_size: + raise ValueError( + f"Embedding size mismatch: expected {self._embedding_size}, " + f"got {len(embedding)}" + ) embeddings = embedding.reshape(1, -1) # Make it 2D: 1xN self._vectors = np.append(self._vectors, embeddings, axis=0) if key is not None: @@ -96,20 +94,30 @@ def add_embedding( def add_embeddings( self, keys: None | list[str], embeddings: NormalizedEmbeddings ) -> None: - assert embeddings.ndim == 2 - assert embeddings.shape[1] == self._embedding_size + if embeddings.ndim != 2: + raise ValueError(f"Expected 2D embeddings array, got {embeddings.ndim}D") + if self._embedding_size == 0: + self._set_embedding_size(embeddings.shape[1]) + self._vectors.shape = (0, self._embedding_size) + if embeddings.shape[1] != self._embedding_size: + raise ValueError( + f"Embedding size mismatch: expected {self._embedding_size}, " + f"got {embeddings.shape[1]}" + ) self._vectors = np.concatenate((self._vectors, embeddings), axis=0) if keys is not None: for key, embedding in zip(keys, embeddings): self._model.add_embedding(key, embedding) async def add_key(self, key: str, cache: bool = True) -> None: - embeddings = (await self.get_embedding(key, cache=cache)).reshape(1, -1) - self._vectors = np.append(self._vectors, embeddings, axis=0) + embedding = await self.get_embedding(key, cache=cache) + self.add_embedding(key if cache else None, embedding) async def add_keys(self, keys: list[str], cache: bool = True) -> None: 
+ if not keys: + return embeddings = await self.get_embeddings(keys, cache=cache) - self._vectors = np.concatenate((self._vectors, embeddings), axis=0) + self.add_embeddings(keys if cache else None, embeddings) def fuzzy_lookup_embedding( self, @@ -122,17 +130,35 @@ def fuzzy_lookup_embedding( max_hits = 10 if min_score is None: min_score = 0.0 - # This line does most of the work: - scores: Iterable[float] = np.dot(self._vectors, embedding) - scored_ordinals = [ - ScoredInt(i, score) - for i, score in enumerate(scores) - if score >= min_score and (predicate is None or predicate(i)) - ] - scored_ordinals.sort(key=lambda x: x.score, reverse=True) - return scored_ordinals[:max_hits] + if len(self._vectors) == 0: + return [] + scores = np.dot(self._vectors, embedding) + if predicate is None: + # Stay in numpy: filter by score, then top-k via argpartition. + indices = np.flatnonzero(scores >= min_score) + if len(indices) == 0: + return [] + filtered_scores = scores[indices] + if len(indices) <= max_hits: + order = np.argsort(filtered_scores)[::-1] + else: + top_k = np.argpartition(filtered_scores, -max_hits)[-max_hits:] + order = top_k[np.argsort(filtered_scores[top_k])[::-1]] + return [ + ScoredInt(int(indices[i]), float(filtered_scores[i])) for i in order + ] + else: + # Predicate path: pre-filter by score in numpy, apply predicate + # only to candidates above the threshold. + candidates = np.flatnonzero(scores >= min_score) + scored_ordinals = [ + ScoredInt(int(i), float(scores[i])) + for i in candidates + if predicate(int(i)) + ] + scored_ordinals.sort(key=lambda x: x.score, reverse=True) + return scored_ordinals[:max_hits] - # TODO: Make this and fuzzy_lookup_embedding() more similar. 
def fuzzy_lookup_embedding_in_subset( self, embedding: NormalizedEmbedding, @@ -140,9 +166,27 @@ def fuzzy_lookup_embedding_in_subset( max_hits: int | None = None, min_score: float | None = None, ) -> list[ScoredInt]: - return self.fuzzy_lookup_embedding( - embedding, max_hits, min_score, lambda i: i in ordinals_of_subset - ) + if max_hits is None: + max_hits = 10 + if min_score is None: + min_score = 0.0 + if not ordinals_of_subset or len(self._vectors) == 0: + return [] + # Compute dot products only for the subset instead of all vectors. + subset = np.asarray(ordinals_of_subset) + scores = np.dot(self._vectors[subset], embedding) + indices = np.flatnonzero(scores >= min_score) + if len(indices) == 0: + return [] + filtered_scores = scores[indices] + if len(indices) <= max_hits: + order = np.argsort(filtered_scores)[::-1] + else: + top_k = np.argpartition(filtered_scores, -max_hits)[-max_hits:] + order = top_k[np.argsort(filtered_scores[top_k])[::-1]] + return [ + ScoredInt(int(subset[indices[i]]), float(filtered_scores[i])) for i in order + ] async def fuzzy_lookup( self, @@ -160,9 +204,15 @@ async def fuzzy_lookup( embedding, max_hits=max_hits, min_score=min_score, predicate=predicate ) + def _set_embedding_size(self, size: int) -> None: + """Adopt *size* when it was not known at construction time.""" + assert size > 0 + self._embedding_size = size + def clear(self) -> None: self._vectors = np.array([], dtype=np.float32) - self._vectors.shape = (0, self._embedding_size) + if self._embedding_size > 0: + self._vectors.shape = (0, self._embedding_size) def get_embedding_at(self, pos: int) -> NormalizedEmbedding: if 0 <= pos < len(self._vectors): @@ -175,13 +225,20 @@ def serialize_embedding_at(self, pos: int) -> NormalizedEmbedding | None: return self._vectors[pos] if 0 <= pos < len(self._vectors) else None def serialize(self) -> NormalizedEmbeddings: - assert self._vectors.shape == (len(self._vectors), self._embedding_size) + if self._embedding_size > 0: + assert 
self._vectors.shape == (len(self._vectors), self._embedding_size) return self._vectors # TODO: Should we make a copy? def deserialize(self, data: NormalizedEmbeddings | None) -> None: if data is None: self.clear() return + if self._embedding_size == 0: + if data.ndim < 2 or data.shape[0] == 0: + # Empty data — can't determine size; just clear. + self.clear() + return + self._set_embedding_size(data.shape[1]) assert data.shape == (len(data), self._embedding_size), [ data.shape, self._embedding_size, diff --git a/src/typeagent/emails/email_import.py b/src/typeagent/emails/email_import.py index 9f0c7d91..88f13b94 100644 --- a/src/typeagent/emails/email_import.py +++ b/src/typeagent/emails/email_import.py @@ -1,13 +1,14 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. +from datetime import datetime from email import message_from_string -from email.header import decode_header, make_header +from email.header import decode_header, Header, make_header from email.message import Message from email.utils import parsedate_to_datetime from pathlib import Path import re -from typing import Iterable +from typing import Iterable, overload from .email_message import EmailMessage, EmailMessageMeta @@ -20,6 +21,27 @@ def decode_encoded_words(value: str) -> str: return str(make_header(decode_header(value))) +# Coerce an email header value to str or None. +# msg.get() can return an email.header.Header object instead of a plain str when the header contains RFC 2047 encoded words. +# Pydantic expects str, so we normalise here. + + +@overload +def _header_to_str(value: str | Header | None, default: str) -> str: ... + + +@overload +def _header_to_str(value: str | Header | None) -> str | None: ... 
+ + +def _header_to_str( + value: str | Header | None, default: str | None = None +) -> str | None: + if value is None: + return default + return str(value) + + def import_emails_from_dir( dir_path: str, max_chunk_length: int = 4096 ) -> Iterable[EmailMessage]: @@ -64,14 +86,16 @@ def import_forwarded_email_string( # Imports an email.message.Message object and returns an EmailMessage object # If the message is a reply, returns only the latest response. def import_email_message(msg: Message, max_chunk_length: int) -> EmailMessage: - # Extract metadata from + # Extract metadata from headers. + # msg.get() can return a Header object instead of str for encoded headers, + # so coerce all values to str. email_meta = EmailMessageMeta( - sender=msg.get("From", ""), + sender=_header_to_str(msg.get("From"), ""), recipients=_import_address_headers(msg.get_all("To", [])), cc=_import_address_headers(msg.get_all("Cc", [])), bcc=_import_address_headers(msg.get_all("Bcc", [])), - subject=msg.get("Subject"), # TODO: Remove newlines - id=msg.get("Message-ID", None), + subject=_header_to_str(msg.get("Subject")), + id=_header_to_str(msg.get("Message-ID")), ) timestamp: str | None = None timestamp_date = msg.get("Date", None) @@ -175,7 +199,13 @@ def _decode_email_payload(part: Message) -> str: return payload return "" if isinstance(payload, bytes): - return payload.decode(part.get_content_charset() or "utf-8", errors="replace") + charset = part.get_content_charset() or "latin-1" + try: + return payload.decode(charset, errors="replace") + except LookupError: + # Unknown encoding (e.g. iso-8859-8-i); fall back to latin-1 + # which accepts all 256 byte values without loss. 
+ return payload.decode("latin-1") if isinstance(payload, str): return payload return "" @@ -187,7 +217,7 @@ def _import_address_headers(headers: list[str]) -> list[str]: unique_addresses: set[str] = set() for header in headers: if header: - addresses = _remove_empty_strings(header.split(",")) + addresses = _remove_empty_strings(str(header).split(",")) for address in addresses: unique_addresses.add(address) @@ -233,8 +263,35 @@ def _merge_chunks( yield cur_chunk cur_chunk = new_chunk else: - cur_chunk += separator + if cur_chunk: + cur_chunk += separator cur_chunk += new_chunk if (len(cur_chunk)) > 0: yield cur_chunk + + +def email_matches_date_filter( + timestamp: str | None, + start_date: datetime | None, + stop_date: datetime | None, +) -> bool: + """Check whether an email's ISO timestamp passes the date filters. + + The range is half-open: [start_date, stop_date). + Emails without a parseable timestamp are always included. + """ + if timestamp is None: + return True + try: + email_dt = datetime.fromisoformat(timestamp) + except ValueError: + return True + # Treat offset-naive timestamps as local time for comparison + if email_dt.tzinfo is None: + email_dt = email_dt.astimezone() + if start_date and email_dt < start_date: + return False + if stop_date and email_dt >= stop_date: + return False + return True diff --git a/src/typeagent/emails/email_memory.py b/src/typeagent/emails/email_memory.py index d6cf06cb..6dd50cc4 100644 --- a/src/typeagent/emails/email_memory.py +++ b/src/typeagent/emails/email_memory.py @@ -8,11 +8,10 @@ import typechat -from ..aitools import utils +from ..aitools import model_adapters, utils from ..knowpro import ( answer_response_schema, answers, - convknowledge, search_query_schema, searchlang, ) @@ -24,7 +23,7 @@ class EmailMemorySettings: def __init__(self, conversation_settings: ConversationSettings) -> None: - self.language_model = convknowledge.create_typechat_model() + self.language_model = model_adapters.create_chat_model() 
self.query_translator = utils.create_translator( self.language_model, search_query_schema.SearchQuery ) diff --git a/src/typeagent/emails/email_message.py b/src/typeagent/emails/email_message.py index 97fd86ee..47abdbec 100644 --- a/src/typeagent/emails/email_message.py +++ b/src/typeagent/emails/email_message.py @@ -7,7 +7,7 @@ from pydantic import Field from pydantic.dataclasses import dataclass as pydantic_dataclass -from ..knowpro import kplib +from ..knowpro import knowledge_schema as kplib from ..knowpro.field_helpers import CamelCaseField from ..knowpro.interfaces import IKnowledgeSource, IMessage, IMessageMetadata @@ -161,6 +161,7 @@ def __init__(self, **data: Any) -> None: ) timestamp: str | None = None # Use metadata.sent_on for the actual sent time src_url: str | None = None # Source file or uri for this email + source_id: str | None = None # External source id (see IMessage.source_id) def get_knowledge(self) -> kplib.KnowledgeResponse: return self.metadata.get_knowledge() diff --git a/src/typeagent/knowpro/answer_context.py b/src/typeagent/knowpro/answer_context.py new file mode 100644 index 00000000..34b1aad5 --- /dev/null +++ b/src/typeagent/knowpro/answer_context.py @@ -0,0 +1,214 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +"""Answer context utilities and chunking helpers.""" + +from collections.abc import Iterable, Iterator +from dataclasses import dataclass +from datetime import datetime +import json +from typing import Any + +from .answer_context_schema import AnswerContext, RelevantKnowledge, RelevantMessage + + +@dataclass +class AnswerContextOptions: + entities_top_k: int | None = None + topics_top_k: int | None = None + messages_top_k: int | None = None + chunking: bool | None = None + debug: bool | None = None + + +def answer_context_to_string(context: AnswerContext, spaces: int | None = 0) -> str: + json_parts: list[str] = ["{\n"] + property_count = 0 + + if context.entities: + json_parts.append( + _add_property("entities", context.entities, property_count, spaces) + ) + property_count += 1 + if context.topics: + json_parts.append( + _add_property("topics", context.topics, property_count, spaces) + ) + property_count += 1 + if context.messages: + json_parts.append( + _add_property("messages", context.messages, property_count, spaces) + ) + property_count += 1 + + json_parts.append("\n}") + return "".join(json_parts) + + +def _add_property(name: str, value: Any, count: int, spaces: int | None) -> str: + text = "" + if count > 0: + text += ",\n" + text += f'"{name}": {json_stringify_for_prompt(value, spaces)}' + return text + + +class AnswerContextChunkBuilder: + def __init__(self, context: AnswerContext, max_chars_per_chunk: int) -> None: + self.context = context + self.max_chars_per_chunk = max_chars_per_chunk + self.current_chunk = AnswerContext() + self.current_chunk_char_count = 0 + + def get_chunks( + self, include_knowledge: bool = True, include_messages: bool = True + ) -> Iterator[AnswerContext]: + self._new_chunk() + if include_knowledge: + for chunk in self._chunk_knowledge(self.context.entities, "entities"): + yield chunk + for chunk in self._chunk_knowledge(self.context.topics, "topics"): + yield chunk + if self.current_chunk_char_count > 0: + yield 
self.current_chunk + self._new_chunk() + if include_messages: + for chunk in self._chunk_messages(): + yield chunk + if self.current_chunk_char_count > 0: + yield self.current_chunk + + def _chunk_knowledge( + self, knowledge: list[RelevantKnowledge] | None, type_name: str + ) -> Iterator[AnswerContext]: + if knowledge: + for item in knowledge: + completed_chunk = self._add_to_current_chunk(item, type_name) + if completed_chunk is not None: + yield completed_chunk + + def _chunk_messages(self) -> Iterator[AnswerContext]: + if self.context.messages: + for message in self.context.messages: + if not message.message_text: + continue + message_chunks = split_large_text_into_chunks( + message.message_text, self.max_chars_per_chunk + ) + for msg_chunk in message_chunks: + chunk_message = RelevantMessage( + from_=message.from_, + to=message.to, + timestamp=message.timestamp, + message_text=msg_chunk, + ) + completed_chunk = self._add_to_current_chunk( + chunk_message, "messages" + ) + if completed_chunk is not None: + yield completed_chunk + + def _add_to_current_chunk(self, item: Any, type_name: str) -> AnswerContext | None: + item_string = json_stringify_for_prompt(item) + item_size = len(item_string) + if ( + self.current_chunk_char_count + item_size > self.max_chars_per_chunk + and self.current_chunk_char_count > 0 + ): + completed_chunk = self.current_chunk + self._new_chunk() + self._append_item(item, type_name, item_size) + return completed_chunk + self._append_item(item, type_name, item_size) + return None + + def _append_item(self, item: Any, type_name: str, item_size: int) -> None: + if getattr(self.current_chunk, type_name) is None: + setattr(self.current_chunk, type_name, []) + getattr(self.current_chunk, type_name).append(item) + self.current_chunk_char_count += item_size + + def _new_chunk(self) -> None: + self.current_chunk = AnswerContext() + self.current_chunk_char_count = 0 + + +def json_stringify_for_prompt(value: Any, spaces: int | None = None) -> str: 
+ serializable = _to_prompt_value(value) + return json.dumps( + serializable, + ensure_ascii=False, + indent=spaces, + separators=(",", ":") if spaces is None else None, + default=_json_default, + ) + + +def _json_default(value: Any) -> Any: + if isinstance(value, datetime): + return value.isoformat() + raise TypeError(f"Object of type {type(value).__name__} is not JSON serializable") + + +def split_large_text_into_chunks( + text: str | list[str], max_chars_per_chunk: int +) -> list[str]: + if isinstance(text, list): + chunks: list[str] = [] + for part in text: + chunks.extend(split_large_text_into_chunks(part, max_chars_per_chunk)) + return chunks + if len(text) <= max_chars_per_chunk: + return [text] + return list(_split_text_by_paragraph(text, max_chars_per_chunk)) + + +def _split_text_by_paragraph(text: str, max_chars_per_chunk: int) -> Iterable[str]: + paragraphs = [p.strip() for p in text.split("\n\n") if p.strip()] + if not paragraphs: + return [text[:max_chars_per_chunk]] + return _merge_chunks(paragraphs, "\n\n", max_chars_per_chunk) + + +def _merge_chunks( + chunks: Iterable[str], separator: str, max_chars_per_chunk: int +) -> Iterable[str]: + current = "" + for new_chunk in chunks: + if len(new_chunk) > max_chars_per_chunk: + new_chunk = new_chunk[:max_chars_per_chunk] + if ( + current + and len(current) + len(new_chunk) + len(separator) > max_chars_per_chunk + ): + yield current + current = new_chunk + else: + current = new_chunk if not current else f"{current}{separator}{new_chunk}" + if current: + yield current + + +def _to_prompt_value(value: Any) -> Any: + if value is None: + return None + if hasattr(value, "__pydantic_serializer__"): + return value.__pydantic_serializer__.to_python( # type: ignore[attr-defined] + value, by_alias=True, exclude_none=True + ) + if hasattr(value, "__annotations__"): + data: dict[str, Any] = {} + for key in value.__annotations__: + item = getattr(value, key, None) + if item is not None: + data[key] = 
_to_prompt_value(item) + return data + if isinstance(value, dict): + return { + key: _to_prompt_value(item) + for key, item in value.items() + if item is not None + } + if isinstance(value, list): + return [_to_prompt_value(item) for item in value] + return value diff --git a/src/typeagent/knowpro/answer_context_schema.py b/src/typeagent/knowpro/answer_context_schema.py index 57625caa..6f3bdaeb 100644 --- a/src/typeagent/knowpro/answer_context_schema.py +++ b/src/typeagent/knowpro/answer_context_schema.py @@ -3,14 +3,15 @@ # TODO: Are we sure this isn't used as a translator schema class? -from dataclasses import dataclass -from typing import Annotated, Any, Union +from typing import Annotated, Any from typing_extensions import Doc from ..knowpro.interfaces import DateRange +from .dataclasses import dataclass +from .field_helpers import CamelCaseField -EntityNames = Union[str, list[str]] +type EntityNames = str | list[str] @dataclass @@ -24,27 +25,40 @@ class RelevantKnowledge: Doc("Entity or entities who received or consumed this knowledge"), ] = None time_range: Annotated[ - DateRange | None, Doc("Time period during which this knowledge was gathered") + DateRange | None, + Doc("Time period during which this knowledge was gathered"), + CamelCaseField(field_name="time_range"), ] = None @dataclass class RelevantMessage: - from_: Annotated[EntityNames | None, Doc("Sender(s) of the message")] - to: Annotated[EntityNames | None, Doc("Recipient(s) of the message")] - timestamp: Annotated[str | None, Doc("Timestamp of the message in ISO format")] - message_text: Annotated[str | list[str] | None, Doc("Text chunks in this message")] + from_: Annotated[ + EntityNames | None, + Doc("Sender(s) of the message"), + CamelCaseField(field_name="from_"), + ] = None + to: Annotated[EntityNames | None, Doc("Recipient(s) of the message")] = None + timestamp: Annotated[ + str | None, + Doc("Timestamp of the message in ISO format"), + ] = None + message_text: Annotated[ + str | list[str] 
| None, + Doc("Text chunks in this message"), + CamelCaseField(field_name="message_text"), + ] = None @dataclass class AnswerContext: - """Use empty lists for unneeded properties.""" + """Use None for unneeded properties.""" entities: Annotated[ - list[RelevantKnowledge], + list[RelevantKnowledge] | None, Doc( "Relevant entities. Use the 'name' and 'type' properties of entities to PRECISELY identify those that answer the user question." ), - ] - topics: Annotated[list[RelevantKnowledge], Doc("Relevant topics")] - messages: Annotated[list[RelevantMessage], Doc("Relevant messages")] + ] = None + topics: Annotated[list[RelevantKnowledge] | None, Doc("Relevant topics")] = None + messages: Annotated[list[RelevantMessage] | None, Doc("Relevant messages")] = None diff --git a/src/typeagent/knowpro/answer_generator.py b/src/typeagent/knowpro/answer_generator.py new file mode 100644 index 00000000..c77d601d --- /dev/null +++ b/src/typeagent/knowpro/answer_generator.py @@ -0,0 +1,537 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +"""Answer generation utilities based on AnswerContext.""" + +import asyncio +from collections.abc import Awaitable, Callable, Iterable, Sequence +from dataclasses import dataclass +from typing import Protocol + +import typechat + +from ..aitools import utils +from .answer_context import ( + answer_context_to_string, + AnswerContextChunkBuilder, + AnswerContextOptions, +) +from .answer_context_schema import AnswerContext +from .answer_response_schema import AnswerResponse +from .answers import ( + get_relevant_entities_for_answer, + get_relevant_messages_for_answer, + get_relevant_topics_for_answer, +) +from .convknowledge import create_typechat_model +from .interfaces import IConversation +from .search import ConversationSearchResult +from .searchlib import create_multiple_choice_question + +type AnswerTranslator = typechat.TypeChatJsonTranslator[AnswerResponse] + +type ProcessProgress = Callable[ + [AnswerContext, int, typechat.Result[AnswerResponse]], None +] + + +def create_answer_translator( + model: typechat.TypeChatLanguageModel, +) -> AnswerTranslator: + return utils.create_translator(model, AnswerResponse) + + +@dataclass +class AnswerGeneratorSettings: + answer_generator_model: typechat.TypeChatLanguageModel + answer_combiner_model: typechat.TypeChatLanguageModel + max_chars_in_budget: int + concurrency: int + fast_stop: bool + model_instructions: list[typechat.PromptSection] | None = None + include_context_schema: bool | None = None + + +class IAnswerGeneratorSettings(Protocol): + max_chars_in_budget: int + concurrency: int + fast_stop: bool + model_instructions: list[typechat.PromptSection] | None + include_context_schema: bool | None + + +class IAnswerGenerator(Protocol): + """Protocol for answer generators.""" + + @property + def settings(self) -> IAnswerGeneratorSettings: ... + + async def generate_answer( + self, question: str, context: AnswerContext | str, debug: bool + ) -> typechat.Result[AnswerResponse]: ... 
+ + async def combine_partial_answers( + self, question: str, responses: Sequence[AnswerResponse | None] + ) -> typechat.Result[AnswerResponse]: ... + + +def create_answer_generator_settings( + model: typechat.TypeChatLanguageModel | None = None, +) -> AnswerGeneratorSettings: + answer_generator_model = model or create_typechat_model() + answer_combiner_model = create_typechat_model() + return AnswerGeneratorSettings( + answer_generator_model=answer_generator_model, + answer_combiner_model=answer_combiner_model, + max_chars_in_budget=4096 * 4, + concurrency=2, + fast_stop=True, + ) + + +async def generate_answer( + conversation: IConversation, + generator: IAnswerGenerator, + question: str, + search_results: ConversationSearchResult | list[ConversationSearchResult], + progress: ProcessProgress | None, + context_options: AnswerContextOptions | None, +) -> typechat.Result[AnswerResponse]: + if not isinstance(search_results, list): + return await _generate_answer_from_search_result( + conversation, + generator, + question, + search_results, + progress, + context_options, + ) + if not search_results: + return typechat.Failure("No search results") + if len(search_results) == 1: + return await _generate_answer_from_search_result( + conversation, + generator, + question, + search_results[0], + progress, + context_options, + ) + + partial_results = await _map_async( + search_results, + generator.settings.concurrency, + lambda sr: _generate_answer_from_search_result( + conversation, + generator, + question, + sr, + progress, + context_options, + ), + ) + partial_responses: list[AnswerResponse] = [] + for result in partial_results: + if isinstance(result, typechat.Failure): + return result + partial_responses.append(result.value) + return await generator.combine_partial_answers(question, partial_responses) + + +async def generate_answer_in_chunks( + answer_generator: IAnswerGenerator, + question: str, + chunks: list[AnswerContext], + progress: ProcessProgress | None, + 
debug: bool, +) -> typechat.Result[list[AnswerResponse]]: + if not chunks: + return typechat.Success([]) + if len(chunks) == 1: + return await _run_single_chunk( + answer_generator, question, chunks, progress, debug + ) + + chunk_answers: list[AnswerResponse] = [] + knowledge_chunks = _get_knowledge_chunks(chunks) + has_knowledge_answer = False + if knowledge_chunks: + knowledge_answers = await _run_generate_answers( + answer_generator, question, knowledge_chunks, progress, debug + ) + if isinstance(knowledge_answers, typechat.Failure): + return knowledge_answers + chunk_answers.extend(knowledge_answers.value) + has_knowledge_answer = any(a.type == "Answered" for a in chunk_answers) + + if not has_knowledge_answer or not answer_generator.settings.fast_stop: + message_chunks = [ + chunk for chunk in chunks if chunk.messages is not None and chunk.messages + ] + message_answers = await _run_generate_answers( + answer_generator, question, message_chunks, progress, debug + ) + if isinstance(message_answers, typechat.Failure): + return message_answers + chunk_answers.extend(message_answers.value) + + return typechat.Success(chunk_answers) + + +async def generate_multiple_choice_answer( + conversation: IConversation, + generator: IAnswerGenerator, + question: str, + answer_choices: list[str], + search_results: ConversationSearchResult | list[ConversationSearchResult], + progress: ProcessProgress | None, + context_options: AnswerContextOptions | None, +) -> typechat.Result[AnswerResponse]: + question = create_multiple_choice_question(question, answer_choices) + return await generate_answer( + conversation, + generator, + question, + search_results, + progress, + context_options, + ) + + +class AnswerGenerator(IAnswerGenerator): + def __init__(self, settings: AnswerGeneratorSettings | None = None) -> None: + self._settings = settings or create_answer_generator_settings() + self.answer_translator = create_answer_translator( + self.settings.answer_generator_model + ) + 
self.context_schema = _create_context_schema( + self.settings.answer_generator_model + ) + self.context_type_name = "AnswerContext" + + @property + def settings(self) -> AnswerGeneratorSettings: + return self._settings + + async def generate_answer( + self, question: str, context: AnswerContext | str, debug: bool + ) -> typechat.Result[AnswerResponse]: + context_content = ( + context if isinstance(context, str) else answer_context_to_string(context) + ) + if context_content and len(context_content) > self.settings.max_chars_in_budget: + context_content = trim_string_length( + context_content, self.settings.max_chars_in_budget + ) + prompt_parts = [ + create_question_prompt(question), + create_context_prompt( + self.context_type_name, + self.context_schema if self.settings.include_context_schema else "", + context_content, + ), + ] + prompt_text = "\n\n".join(prompt_parts) + if debug: + print("Stage 4 input:") + print(prompt_text.rstrip()) + print("-" * 50) + return await self.answer_translator.translate( + prompt_text, prompt_preamble=self.settings.model_instructions + ) + + async def combine_partial_answers( + self, question: str, responses: Sequence[AnswerResponse | None] + ) -> typechat.Result[AnswerResponse]: + if len(responses) == 1: + response = responses[0] + if response is not None: + return typechat.Success(response) + return typechat.Failure("No answer") + + answer_text = "" + why_no_answer: str | None = None + answer_count = 0 + for partial_answer in responses: + if partial_answer is None: + continue + if partial_answer.type == "Answered": + answer_count += 1 + if partial_answer.answer: + answer_text += f"{partial_answer.answer}\n" + else: + why_no_answer = why_no_answer or partial_answer.why_no_answer + + if answer_text: + if answer_count > 1: + answer_text = trim_string_length( + answer_text, self.settings.max_chars_in_budget + ) + rewritten = await rewrite_text( + self.settings.answer_combiner_model, answer_text, question + ) + if not rewritten: + 
return typechat.Failure("rewrite_answer failed") + answer_text = rewritten + return typechat.Success(AnswerResponse(type="Answered", answer=answer_text)) + + return typechat.Success( + AnswerResponse(type="NoAnswer", why_no_answer=why_no_answer or "") + ) + + +def split_context_into_chunks( + context: AnswerContext, max_chars_per_chunk: int +) -> list[AnswerContext]: + chunk_builder = AnswerContextChunkBuilder(context, max_chars_per_chunk) + return list(chunk_builder.get_chunks()) + + +async def answer_context_from_search_result( + conversation: IConversation, + search_result: ConversationSearchResult, + options: AnswerContextOptions | None, +) -> AnswerContext: + context = AnswerContext() + for knowledge_type, knowledge in search_result.knowledge_matches.items(): + match knowledge_type: + case "entity": + context.entities = await get_relevant_entities_for_answer( + conversation, + knowledge, + options.entities_top_k if options else None, + ) + case "topic": + context.topics = await get_relevant_topics_for_answer( + conversation, + knowledge, + options.topics_top_k if options else None, + ) + case _: + continue + + if search_result.message_matches: + context.messages = await get_relevant_messages_for_answer( + conversation, + search_result.message_matches, + options.messages_top_k if options else None, + ) + + return context + + +async def _generate_answer_from_search_result( + conversation: IConversation, + generator: IAnswerGenerator, + question: str, + search_result: ConversationSearchResult, + progress: ProcessProgress | None, + context_options: AnswerContextOptions | None, +) -> typechat.Result[AnswerResponse]: + context = await answer_context_from_search_result( + conversation, search_result, context_options + ) + context_content = answer_context_to_string(context) + chunking = ( + True + if context_options is None or context_options.chunking is None + else context_options.chunking + ) + if not chunking or len(context_content) <= 
generator.settings.max_chars_in_budget: + return await generator.generate_answer( + question, + context_content, + context_options and context_options.debug or False, + ) + + chunks = split_context_into_chunks(context, generator.settings.max_chars_in_budget) + chunk_responses = await generate_answer_in_chunks( + generator, + question, + chunks, + progress, + context_options and context_options.debug or False, + ) + if isinstance(chunk_responses, typechat.Failure): + return chunk_responses + return await generator.combine_partial_answers(question, chunk_responses.value) + + +async def _run_single_chunk( + answer_generator: IAnswerGenerator, + question: str, + chunks: list[AnswerContext], + progress: ProcessProgress | None, + debug: bool, +) -> typechat.Result[list[AnswerResponse]]: + response = await answer_generator.generate_answer(question, chunks[0], debug) + if progress: + progress(chunks[0], 0, response) + if isinstance(response, typechat.Failure): + return response + return typechat.Success([response.value]) + + +def _get_knowledge_chunks(chunks: Iterable[AnswerContext]) -> list[AnswerContext]: + structured_chunks: list[AnswerContext] = [] + for chunk in chunks: + knowledge_chunk: AnswerContext | None = None + if chunk.entities: + knowledge_chunk = knowledge_chunk or AnswerContext() + knowledge_chunk.entities = chunk.entities + if chunk.topics: + knowledge_chunk = knowledge_chunk or AnswerContext() + knowledge_chunk.topics = chunk.topics + if chunk.messages: + knowledge_chunk = knowledge_chunk or AnswerContext() + knowledge_chunk.messages = chunk.messages + if knowledge_chunk is not None: + structured_chunks.append(knowledge_chunk) + return structured_chunks + + +async def _run_generate_answers( + answer_generator: IAnswerGenerator, + question: str, + chunks: list[AnswerContext], + progress: ProcessProgress | None, + debug: bool, +) -> typechat.Result[list[AnswerResponse]]: + if not chunks: + return typechat.Success([]) + + results = await _map_async( + 
chunks, + answer_generator.settings.concurrency, + lambda chunk: answer_generator.generate_answer(question, chunk, debug), + progress, + lambda _chunk, _index, response: not ( + isinstance(response, typechat.Success) and response.value.type == "Answered" + ), + ) + return _flatten_results(results) + + +def _create_context_schema(model: typechat.TypeChatLanguageModel) -> str: + validator = typechat.TypeChatValidator[AnswerContext](AnswerContext) + translator = typechat.TypeChatJsonTranslator[AnswerContext]( + model, validator, AnswerContext + ) + return translator.schema_str.rstrip() + + +def create_question_prompt(question: str) -> str: + prompt_lines = [ + "The following is a user question:", + "===", + question, + "===", + "- The included [ANSWER CONTEXT] contains information that MAY be relevant to answering the question.", + "- Answer the user question PRECISELY using ONLY information EXPLICITLY provided in the topics, entities, actions, messages and time ranges/timestamps found in [ANSWER CONTEXT]", + "- Return 'NoAnswer' if you are unsure, , if the answer is not explicitly in [ANSWER CONTEXT], or if the topics or {entity names, types and facets} in the question are not found in [ANSWER CONTEXT].", + "- Use the 'name', 'type' and 'facets' properties of the provided JSON entities to identify those highly relevant to answering the question.", + "- 'origin' and 'audience' fields contain the names of entities involved in communication about the knowledge", + "**Important:** Communicating DOES NOT imply associations such as authorship, ownership etc. E.g. origin: [X] telling audience [Y, Z] communicating about a book does not imply authorship.", + "- When asked for lists, ensure the list contents answer the question and nothing else. E.g. for the question 'List all books': List only the books in [ANSWER CONTEXT].", + "- Use direct quotes only when needed or asked. 
Otherwise answer in your own words.", + "- Your answer is readable and complete, with appropriate formatting: line breaks, numbered lists, bullet points etc.", + ] + return "\n".join(prompt_lines) + + +def create_context_prompt(type_name: str, schema: str, context: str) -> str: + content = "" + if schema: + content += ( + "[ANSWER CONTEXT] for answering user questions is a JSON object of type " + f"{type_name} according to the following TypeScript definitions:\n" + f"```\n{schema}\n```\n" + ) + content += f"[ANSWER CONTEXT]\n===\n{context}\n===\n" + return content + + +def trim_string_length(text: str, max_chars: int) -> str: + if max_chars <= 0 or len(text) <= max_chars: + return text + return text[:max_chars] + "..." + + +async def rewrite_text( + model: typechat.TypeChatLanguageModel, text: str, question: str +) -> str | None: + prompt = [ + typechat.PromptSection( + role="system", + content=( + "Rewrite the partial answers into a single concise answer that " + "directly addresses the original question." 
+ ), + ), + typechat.PromptSection( + role="user", + content=( + f"Question:\n{question}\n\nPartial answers:\n{text}\n\n" + "Rewrite into a single answer:" + ), + ), + ] + result = await model.complete(prompt) + if isinstance(result, typechat.Failure): + return None + return result.value.strip() + + +async def _map_async[TItem, TResult]( + items: list[TItem], + concurrency: int, + worker: Callable[[TItem], Awaitable[typechat.Result[TResult]]], + progress: Callable[[TItem, int, typechat.Result[TResult]], None] | None = None, + should_continue: ( + Callable[[TItem, int, typechat.Result[TResult]], bool] | None + ) = None, +) -> list[typechat.Result[TResult]]: + if not items: + return [] + + queue: asyncio.Queue[tuple[int, TItem]] = asyncio.Queue() + for index, item in enumerate(items): + queue.put_nowait((index, item)) + + results: list[typechat.Result[TResult] | None] = [None] * len(items) + stop_event = asyncio.Event() + + async def run_worker() -> None: + while True: + if stop_event.is_set(): + return + try: + index, item = queue.get_nowait() + except asyncio.QueueEmpty: + return + try: + result = await worker(item) + results[index] = result + if progress: + progress(item, index, result) + if should_continue and not should_continue(item, index, result): + stop_event.set() + finally: + queue.task_done() + + worker_count = max(1, min(concurrency, len(items))) + tasks = [asyncio.create_task(run_worker()) for _ in range(worker_count)] + await asyncio.gather(*tasks) + + return [result for result in results if result is not None] + + +def _flatten_results[TResult]( + results: list[typechat.Result[TResult]], +) -> typechat.Result[list[TResult]]: + values: list[TResult] = [] + for result in results: + if isinstance(result, typechat.Failure): + return result + values.append(result.value) + return typechat.Success(values) diff --git a/src/typeagent/knowpro/answer_response_schema.py b/src/typeagent/knowpro/answer_response_schema.py index e33fea17..86a6274f 100644 --- 
a/src/typeagent/knowpro/answer_response_schema.py +++ b/src/typeagent/knowpro/answer_response_schema.py @@ -6,8 +6,9 @@ from typing_extensions import Doc from .dataclasses import dataclass +from .field_helpers import CamelCaseField -AnswerType = Literal[ +type AnswerType = Literal[ "NoAnswer", # If question cannot be accurately answered from [ANSWER CONTEXT] "Answered", # Fully answer question # TODO: Add a category for outright errors, e.g. network errors @@ -33,4 +34,5 @@ class AnswerResponse: Doc( "If NoAnswer, explain why..\nparticularly explain why you didn't use any supplied entities" ), + CamelCaseField(field_name="why_no_answer"), ] = None diff --git a/src/typeagent/knowpro/answers.py b/src/typeagent/knowpro/answers.py index ae6fad98..1413b5d6 100644 --- a/src/typeagent/knowpro/answers.py +++ b/src/typeagent/knowpro/answers.py @@ -1,14 +1,13 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -from collections.abc import Iterable +from collections.abc import Iterable, Sequence from dataclasses import dataclass from typing import Any -import black - import typechat +from .answer_context import answer_context_to_string, AnswerContextOptions from .answer_context_schema import AnswerContext, RelevantKnowledge, RelevantMessage from .answer_response_schema import AnswerResponse from .collections import get_top_k, Scored @@ -32,17 +31,77 @@ TextRange, Topic, ) -from .kplib import ConcreteEntity, Facet +from .knowledge_schema import ConcreteEntity, Facet from .search import ConversationSearchResult @dataclass -class AnswerContextOptions: - entities_top_k: int | None = None - topics_top_k: int | None = None - messages_top_k: int | None = None - chunking: bool | None = None - debug: bool = False +class _TranslatorAnswerGeneratorSettings: + max_chars_in_budget: int = 4096 * 4 + concurrency: int = 2 + fast_stop: bool = True + model_instructions: list[typechat.PromptSection] | None = None + include_context_schema: bool | None = None + + +class 
_TranslatorAnswerGenerator: + def __init__( + self, + translator: typechat.TypeChatJsonTranslator[AnswerResponse], + settings: _TranslatorAnswerGeneratorSettings | None = None, + ) -> None: + self._settings = settings or _TranslatorAnswerGeneratorSettings() + self._translator = translator + + @property + def settings(self) -> _TranslatorAnswerGeneratorSettings: + return self._settings + + async def generate_answer( + self, question: str, context: AnswerContext | str, debug: bool + ) -> typechat.Result[AnswerResponse]: + from . import answer_generator as answer_gen + + context_content = ( + context if isinstance(context, str) else answer_context_to_string(context) + ) + if context_content and len(context_content) > self.settings.max_chars_in_budget: + context_content = answer_gen.trim_string_length( + context_content, self.settings.max_chars_in_budget + ) + request = ( + f"{answer_gen.create_question_prompt(question)}\n\n" + f"{answer_gen.create_context_prompt('AnswerContext', '', context_content)}" + ) + if debug: + print("Stage 4 input:") + print(request.rstrip()) + print("-" * 50) + result = await self._translator.translate(request) + return result + + async def combine_partial_answers( + self, question: str, responses: Sequence[AnswerResponse | None] + ) -> typechat.Result[AnswerResponse]: + good_answers: list[str] = [] + why_no_answer: str | None = None + for response in responses: + if response is None: + continue + if response.type == "Answered" and response.answer: + good_answers.append(response.answer) + elif response.type == "NoAnswer": + why_no_answer = why_no_answer or response.why_no_answer + if not good_answers: + return typechat.Success( + AnswerResponse(type="NoAnswer", why_no_answer=why_no_answer or "") + ) + if len(good_answers) == 1: + return typechat.Success( + AnswerResponse(type="Answered", answer=good_answers[0]) + ) + combined = await combine_answers(self._translator, good_answers, question) + return typechat.Success(combined) async def 
generate_answers( @@ -50,12 +109,15 @@ async def generate_answers( search_results: list[ConversationSearchResult], conversation: IConversation, orig_query_text: str, - options: AnswerContextOptions | None = None, + options: AnswerContextOptions | None, ) -> tuple[list[AnswerResponse], AnswerResponse]: # (all answers, combined answer) + generator = _TranslatorAnswerGenerator(translator) all_answers: list[AnswerResponse] = [] good_answers: list[str] = [] for result in search_results: - answer = await generate_answer(translator, result, conversation, options) + answer = await generate_answer( + translator, result, conversation, options, generator=generator + ) all_answers.append(answer) match answer.type: case "Answered": @@ -87,24 +149,29 @@ async def generate_answer[TMessage: IMessage, TIndex: ITermToSemanticRefIndex]( translator: typechat.TypeChatJsonTranslator[AnswerResponse], search_result: ConversationSearchResult, conversation: IConversation[TMessage, TIndex], - options: AnswerContextOptions | None = None, + options: AnswerContextOptions | None, + *, + generator: _TranslatorAnswerGenerator | None = None, ) -> AnswerResponse: assert search_result.raw_query_text is not None, "Raw query text must not be None" - context = await make_context(search_result, conversation, options) - request = f"{create_question_prompt(search_result.raw_query_text)}\n\n{create_context_prompt(context)}" - if options and options.debug: - print("Stage 4 input:") - print(request) - print("-" * 50) - result = await translator.translate(request) + from . 
import answer_generator as answer_gen + + generator = generator or _TranslatorAnswerGenerator(translator) + result = await answer_gen.generate_answer( + conversation, + generator, + search_result.raw_query_text, + search_result, + progress=None, + context_options=options, + ) if isinstance(result, typechat.Failure): return AnswerResponse( type="NoAnswer", answer=None, why_no_answer=f"TypeChat failure: {result.message}", ) - else: - return result.value + return result.value def create_question_prompt(question: str) -> str: @@ -127,10 +194,12 @@ def create_question_prompt(question: str) -> str: def create_context_prompt(context: AnswerContext) -> str: # TODO: Use a more compact representation of the context than JSON. + import pprint + prompt = [ "[ANSWER CONTEXT]", "===", - black.format_str(str(dictify(context)), mode=black.Mode(line_length=200)), + pprint.pformat(dictify(context), width=200), "===", ] return "\n".join(prompt) @@ -163,7 +232,7 @@ async def make_context[TMessage: IMessage, TIndex: ITermToSemanticRefIndex]( conversation: IConversation[TMessage, TIndex], options: AnswerContextOptions | None = None, ) -> AnswerContext: - context = AnswerContext([], [], []) + context = AnswerContext() if search_result.message_matches: context.messages = await get_relevant_messages_for_answer( @@ -387,12 +456,9 @@ def text_range_from_message_range( ) -> TextRange | None: if start == end: # Point location - return TextRange(start=TextLocation(start)) + return TextRange(TextLocation(start)) elif start < end: - return TextRange( - start=TextLocation(start), - end=TextLocation(end), - ) + return TextRange(TextLocation(start), TextLocation(end)) else: raise ValueError(f"Expect message ordinal range: {start} <= {end}") @@ -404,14 +470,15 @@ async def get_enclosing_date_range_for_text_range( start_timestamp = (await messages.get_item(range.start.message_ordinal)).timestamp if not start_timestamp: return None - end_timestamp = ( - (await 
messages.get_item(range.end.message_ordinal)).timestamp - if range.end - else None - ) + end_timestamp: str | None = None + if range.end: + end_ordinal = range.end.message_ordinal + if end_ordinal < await messages.size(): + end_timestamp = (await messages.get_item(end_ordinal)).timestamp + # else: range extends to the end of the conversation; leave as None. return DateRange( - start=Datetime.fromisoformat(start_timestamp), - end=Datetime.fromisoformat(end_timestamp) if end_timestamp else None, + Datetime.fromisoformat(start_timestamp), + Datetime.fromisoformat(end_timestamp) if end_timestamp else None, ) @@ -451,19 +518,22 @@ async def get_scored_semantic_refs_from_ordinals_iter( semantic_ref_matches: list[ScoredSemanticRefOrdinal], knowledge_type: KnowledgeType, ) -> list[Scored[SemanticRef]]: - result = [] - for semantic_ref_match in semantic_ref_matches: - semantic_ref = await semantic_refs.get_item( - semantic_ref_match.semantic_ref_ordinal - ) - if semantic_ref.knowledge.knowledge_type == knowledge_type: - result.append( - Scored( - item=semantic_ref, - score=semantic_ref_match.score, - ) - ) - return result + if not semantic_ref_matches: + return [] + ordinals = [m.semantic_ref_ordinal for m in semantic_ref_matches] + metadata = await semantic_refs.get_metadata_multiple(ordinals) + matching = [ + (sr_match, m.ordinal) + for sr_match, m in zip(semantic_ref_matches, metadata) + if m.knowledge_type == knowledge_type + ] + if not matching: + return [] + full_refs = await semantic_refs.get_multiple([o for _, o in matching]) + return [ + Scored(item=ref, score=sr_match.score) + for (sr_match, _), ref in zip(matching, full_refs) + ] def merge_scored_concrete_entities( @@ -535,7 +605,7 @@ def facets_to_merged_facets(facets: list[Facet]) -> MergedFacets: merged_facets: MergedFacets = {} for facet in facets: name = facet.name.lower() - value = str(facet).lower() + value = str(facet.value).lower() merged_facets.setdefault(name, []).append(value) return merged_facets 
diff --git a/src/typeagent/knowpro/collections.py b/src/typeagent/knowpro/collections.py index 1e5205f3..975926a3 100644 --- a/src/typeagent/knowpro/collections.py +++ b/src/typeagent/knowpro/collections.py @@ -91,10 +91,14 @@ def add(self, value: T, score: float, is_exact_match: bool = True) -> None: ) ) else: + # New related-only match: hit_count stays 0 because + # only exact matches count as direct hits. This matters + # for select_with_hit_count / _matches_with_min_hit_count + # which filter on hit_count to weed out noise. self.set_match( Match( value, - hit_count=1, + hit_count=0, score=0.0, related_hit_count=1, related_score=score, @@ -250,9 +254,11 @@ def smooth_match_score[T](match: Match[T]) -> None: class SemanticRefAccumulator(MatchAccumulator[SemanticRefOrdinal]): - def __init__(self, search_term_matches: set[str] = set()): + def __init__(self, search_term_matches: set[str] | None = None): super().__init__() - self.search_term_matches = search_term_matches + self.search_term_matches = ( + search_term_matches if search_term_matches is not None else set() + ) def add_term_matches( self, @@ -325,14 +331,17 @@ async def group_matches_by_type( self, semantic_refs: ISemanticRefCollection, ) -> dict[KnowledgeType, "SemanticRefAccumulator"]: + matches = list(self) + if not matches: + return {} + ordinals = [match.value for match in matches] + metadata = await semantic_refs.get_metadata_multiple(ordinals) groups: dict[KnowledgeType, SemanticRefAccumulator] = {} - for match in self: - semantic_ref = await semantic_refs.get_item(match.value) - group = groups.get(semantic_ref.knowledge.knowledge_type) + for match, m in zip(matches, metadata): + group = groups.get(m.knowledge_type) if group is None: - group = SemanticRefAccumulator() - group.search_term_matches = self.search_term_matches - groups[semantic_ref.knowledge.knowledge_type] = group + group = SemanticRefAccumulator(self.search_term_matches) + groups[m.knowledge_type] = group group.set_match(match) return 
groups @@ -341,11 +350,14 @@ async def get_matches_in_scope( semantic_refs: ISemanticRefCollection, ranges_in_scope: "TextRangesInScope", ) -> "SemanticRefAccumulator": + matches = list(self) + if not matches: + return SemanticRefAccumulator(self.search_term_matches) + ordinals = [match.value for match in matches] + metadata = await semantic_refs.get_metadata_multiple(ordinals) accumulator = SemanticRefAccumulator(self.search_term_matches) - for match in self: - if ranges_in_scope.is_range_in_scope( - (await semantic_refs.get_item(match.value)).range - ): + for match, m in zip(matches, metadata): + if ranges_in_scope.is_range_in_scope(m.range): accumulator.set_match(match) return accumulator @@ -513,14 +525,15 @@ def add_ranges(self, text_ranges: "list[TextRange] | TextRangeCollection") -> No for text_range in text_ranges._ranges: self.add_range(text_range) - def is_in_range(self, inner_range: TextRange) -> bool: - if len(self._ranges) == 0: + def contains_range(self, inner_range: TextRange) -> bool: + if not self._ranges: return False - i = bisect.bisect_left(self._ranges, inner_range) - for outer_range in self._ranges[i:]: - if outer_range.start > inner_range.start: - break - if inner_range in outer_range: + # Bisect on start only to find all ranges with start <= inner.start, + # then scan backwards — the most likely containing range has the + # largest start still <= inner's. + hi = bisect.bisect_right(self._ranges, inner_range.start, key=lambda r: r.start) + for i in range(hi - 1, -1, -1): + if inner_range in self._ranges[i]: return True return False @@ -544,7 +557,7 @@ def is_range_in_scope(self, inner_range: TextRange) -> bool: # We have a very simple impl: we don't intersect/union ranges yet. # Instead, we ensure that the inner range is not rejected by any outer ranges. 
for outer_ranges in self.text_ranges: - if not outer_ranges.is_in_range(inner_range): + if not outer_ranges.contains_range(inner_range): return False return True diff --git a/src/typeagent/knowpro/conversation_base.py b/src/typeagent/knowpro/conversation_base.py index 4fba68e3..131b0ceb 100644 --- a/src/typeagent/knowpro/conversation_base.py +++ b/src/typeagent/knowpro/conversation_base.py @@ -3,6 +3,7 @@ """Base class for conversations with incremental indexing support.""" +from collections.abc import AsyncIterable, Callable from dataclasses import dataclass from datetime import datetime, timezone from typing import Generic, Self, TypeVar @@ -13,12 +14,14 @@ answer_response_schema, answers, convknowledge, - kplib, +) +from . import ( search_query_schema, searchlang, secindex, ) -from ..aitools import utils +from . import knowledge_schema as kplib +from ..aitools import model_adapters, utils from ..storage.memory import semrefindex from .convsettings import ConversationSettings from .interfaces import ( @@ -34,6 +37,8 @@ MessageOrdinal, Topic, ) +from .knowledge import extract_knowledge_from_text_batch +from .messageutils import get_all_message_chunk_locations TMessage = TypeVar("TMessage", bound=IMessage) @@ -130,9 +135,12 @@ async def add_messages_with_indexing( Args: messages: Messages to add - source_ids: Optional list of source IDs to mark as ingested. These are - marked within the same transaction, so if the indexing fails, the - source IDs won't be marked as ingested (for SQLite storage). + source_ids: Optional explicit list of source IDs to mark as ingested, + one per message. When ``None`` (the default), each message's + ``source_id`` attribute is used instead — messages whose + ``source_id`` is ``None`` are silently skipped. These are marked + within the same transaction, so if the indexing fails, the source + IDs won't be marked as ingested (for SQLite storage). 
Returns: Result with counts of messages/semrefs added @@ -141,7 +149,7 @@ async def add_messages_with_indexing( Exception: Any error """ storage = await self.settings.get_storage_provider() - if source_ids: + if source_ids is not None: if len(source_ids) != len(messages): raise ValueError( f"Length of source_ids {len(source_ids)} " @@ -150,9 +158,13 @@ async def add_messages_with_indexing( async with storage: # Mark source IDs as ingested (will be rolled back on error) - if source_ids: - for source_id in source_ids: - storage.mark_source_ingested(source_id) + if source_ids is not None: + for sid in source_ids: + await storage.mark_source_ingested(sid) + else: + for msg in messages: + if msg.source_id is not None: + await storage.mark_source_ingested(msg.source_id) start_points = IndexingStartPoints( message_count=await self.messages.size(), @@ -171,19 +183,200 @@ async def add_messages_with_indexing( await self._update_secondary_indexes_incremental(start_points) + messages_added = await self.messages.size() - start_points.message_count + chunks_added = sum(len(m.text_chunks) for m in messages[:messages_added]) result = AddMessagesResult( - messages_added=await self.messages.size() - start_points.message_count, + messages_added=messages_added, + chunks_added=chunks_added, semrefs_added=await self.semantic_refs.size() - start_points.semref_count, ) # Update the updated_at timestamp - storage.update_conversation_timestamps( + await storage.update_conversation_timestamps( updated_at=datetime.now(timezone.utc) ) return result + async def add_messages_streaming( + self, + messages: AsyncIterable[TMessage], + *, + batch_size: int = 100, + on_batch_committed: Callable[[AddMessagesResult], None] | None = None, + ) -> AddMessagesResult: + """Add messages from an async iterable, committing in batches. 
+ + Unlike ``add_messages_with_indexing`` which processes all messages in a + single transaction, this method buffers messages into batches of + ``batch_size``, processes each batch in its own transaction, and commits + after every batch. This makes it suitable for ingesting large streams + (millions of messages) where a single all-or-nothing transaction would + be impractical. + + **Source-ID tracking**: each message's ``source_id`` (if not ``None``) + is checked before ingestion. Already-ingested sources are silently + skipped. Newly ingested sources are marked within the same transaction. + + **Extraction failures**: when knowledge extraction returns a + ``Failure`` for a chunk, the failure is recorded via + ``storage.record_chunk_failure`` and processing continues with the + remaining chunks. Raised exceptions (HTTP errors, timeouts, etc.) + are treated as systemic and stop the run immediately — the current + batch is rolled back and the exception propagates. + + Args: + messages: An async iterable of messages to ingest. + batch_size: Number of messages per commit batch. + on_batch_committed: Optional callback invoked after each batch is + committed, receiving the batch's ``AddMessagesResult``. + + Returns: + Cumulative ``AddMessagesResult`` across all committed batches. 
+ """ + storage = await self.settings.get_storage_provider() + total_messages_added = 0 + total_semrefs_added = 0 + total_chunks_added = 0 + + batch: list[TMessage] = [] + async for msg in messages: + batch.append(msg) + if len(batch) >= batch_size: + result = await self._ingest_batch_streaming(storage, batch) + total_messages_added += result.messages_added + total_semrefs_added += result.semrefs_added + total_chunks_added += result.chunks_added + if on_batch_committed: + on_batch_committed(result) + batch = [] + + # Flush remaining messages + if batch: + result = await self._ingest_batch_streaming(storage, batch) + total_messages_added += result.messages_added + total_semrefs_added += result.semrefs_added + total_chunks_added += result.chunks_added + if on_batch_committed: + on_batch_committed(result) + + return AddMessagesResult( + messages_added=total_messages_added, + chunks_added=total_chunks_added, + semrefs_added=total_semrefs_added, + ) + + async def _ingest_batch_streaming( + self, + storage: IStorageProvider[TMessage], + batch: list[TMessage], + ) -> AddMessagesResult: + """Process and commit a single batch within a transaction. + + Messages whose ``source_id`` is already ingested are filtered out. + Extraction ``Failure``\\s are recorded as chunk failures. 
+ """ + # Filter out already-ingested sources + filtered: list[TMessage] = [] + for msg in batch: + if msg.source_id is not None and await storage.is_source_ingested( + msg.source_id + ): + continue + filtered.append(msg) + + if not filtered: + return AddMessagesResult() + + async with storage: + start_points = IndexingStartPoints( + message_count=await self.messages.size(), + semref_count=await self.semantic_refs.size(), + ) + + await self.messages.extend(filtered) + + # Mark source IDs as ingested (rolled back on error) + for msg in filtered: + if msg.source_id is not None: + await storage.mark_source_ingested(msg.source_id) + + await self._add_metadata_knowledge_incremental(start_points.message_count) + + if self.settings.semantic_ref_index_settings.auto_extract_knowledge: + await self._add_llm_knowledge_streaming( + storage, filtered, start_points.message_count + ) + + await self._update_secondary_indexes_incremental(start_points) + + await storage.update_conversation_timestamps( + updated_at=datetime.now(timezone.utc) + ) + + messages_added = await self.messages.size() - start_points.message_count + chunks_added = sum(len(m.text_chunks) for m in filtered[:messages_added]) + return AddMessagesResult( + messages_added=messages_added, + chunks_added=chunks_added, + semrefs_added=await self.semantic_refs.size() + - start_points.semref_count, + ) + + async def _add_llm_knowledge_streaming( + self, + storage: IStorageProvider[TMessage], + messages: list[TMessage], + start_from_message_ordinal: int, + ) -> None: + """Extract LLM knowledge, recording failures instead of raising. + + On ``Failure``: records a chunk failure via the storage provider and + continues. On a raised exception: lets it propagate (the caller's + ``async with storage`` will roll back the transaction). 
+ """ + settings = self.settings.semantic_ref_index_settings + knowledge_extractor = ( + settings.knowledge_extractor or convknowledge.KnowledgeExtractor() + ) + + text_locations = get_all_message_chunk_locations( + messages, start_from_message_ordinal + ) + if not text_locations: + return + + start_ordinal = text_locations[0].message_ordinal + text_batch: list[str] = [] + for tl in text_locations: + list_index = tl.message_ordinal - start_ordinal + text_batch.append( + messages[list_index].text_chunks[tl.chunk_ordinal].strip() + ) + + knowledge_results = await extract_knowledge_from_text_batch( + knowledge_extractor, + text_batch, + settings.concurrency, + ) + for i, knowledge_result in enumerate(knowledge_results): + tl = text_locations[i] + if isinstance(knowledge_result, typechat.Failure): + await storage.record_chunk_failure( + tl.message_ordinal, + tl.chunk_ordinal, + type(knowledge_result).__name__, + knowledge_result.message[:500], + ) + continue + await semrefindex.add_knowledge_to_semantic_ref_index( + self, + tl.message_ordinal, + tl.chunk_ordinal, + knowledge_result.value, + ) + async def _add_metadata_knowledge_incremental( self, start_from_message_ordinal: int, @@ -214,21 +407,17 @@ async def _add_llm_knowledge_incremental( settings.knowledge_extractor or convknowledge.KnowledgeExtractor() ) - # Get batches of text locations from the message list - from .messageutils import get_message_chunk_batch_from_list - - batches = get_message_chunk_batch_from_list( + text_locations = get_all_message_chunk_locations( messages, start_from_message_ordinal, - settings.batch_size, ) - for text_location_batch in batches: - await semrefindex.add_batch_to_semantic_ref_index_from_list( - self, - messages, - text_location_batch, - knowledge_extractor, - ) + await semrefindex.add_batch_to_semantic_ref_index_from_list( + self, + messages, + text_locations, + knowledge_extractor, + concurrency=settings.concurrency, + ) async def _update_secondary_indexes_incremental( self, 
@@ -352,12 +541,12 @@ async def query( """ # Create translators lazily (once per conversation instance) if self._query_translator is None: - model = convknowledge.create_typechat_model() + model = model_adapters.create_chat_model() self._query_translator = utils.create_translator( model, search_query_schema.SearchQuery ) if self._answer_translator is None: - model = convknowledge.create_typechat_model() + model = model_adapters.create_chat_model() self._answer_translator = utils.create_translator( model, answer_response_schema.AnswerResponse ) diff --git a/src/typeagent/knowpro/convknowledge.py b/src/typeagent/knowpro/convknowledge.py index 4bea97d4..49fde42c 100644 --- a/src/typeagent/knowpro/convknowledge.py +++ b/src/typeagent/knowpro/convknowledge.py @@ -1,67 +1,17 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -import asyncio from dataclasses import dataclass, field -import os import typechat -from . import kplib -from ..aitools import auth - -# TODO: Move ModelWrapper and create_typechat_model() to aitools package. - - -# TODO: Make these parameters that can be configured (e.g. from command line). 
-DEFAULT_MAX_RETRY_ATTEMPTS = 0 -DEFAULT_TIMEOUT_SECONDS = 25 - - -class ModelWrapper(typechat.TypeChatLanguageModel): - def __init__( - self, - base_model: typechat.TypeChatLanguageModel, - token_provider: auth.AzureTokenProvider, - ): - self.base_model = base_model - self.token_provider = token_provider - - async def complete( - self, prompt: str | list[typechat.PromptSection] - ) -> typechat.Result[str]: - if self.token_provider.needs_refresh(): - loop = asyncio.get_running_loop() - api_key = await loop.run_in_executor( - None, self.token_provider.refresh_token - ) - env: dict[str, str | None] = dict(os.environ) - key_name = "AZURE_OPENAI_API_KEY" - env[key_name] = api_key - self.base_model = typechat.create_language_model(env) - self.base_model.timeout_seconds = DEFAULT_TIMEOUT_SECONDS - return await self.base_model.complete(prompt) - - -def create_typechat_model() -> typechat.TypeChatLanguageModel: - env: dict[str, str | None] = dict(os.environ) - key_name = "AZURE_OPENAI_API_KEY" - key = env.get(key_name) - shared_token_provider: auth.AzureTokenProvider | None = None - if key is not None and key.lower() == "identity": - shared_token_provider = auth.get_shared_token_provider() - env[key_name] = shared_token_provider.get_token() - model = typechat.create_language_model(env) - model.timeout_seconds = DEFAULT_TIMEOUT_SECONDS - model.max_retry_attempts = DEFAULT_MAX_RETRY_ATTEMPTS - if shared_token_provider is not None: - model = ModelWrapper(model, shared_token_provider) - return model +from . 
import knowledge_schema as kplib +from ..aitools.model_adapters import create_chat_model @dataclass class KnowledgeExtractor: - model: typechat.TypeChatLanguageModel = field(default_factory=create_typechat_model) + model: typechat.TypeChatLanguageModel = field(default_factory=create_chat_model) max_chars_per_chunk: int = 2048 merge_action_knowledge: bool = ( False # TODO: Implement merge_action_knowledge_into_response diff --git a/src/typeagent/knowpro/convsettings.py b/src/typeagent/knowpro/convsettings.py index 627546ed..97c2bee2 100644 --- a/src/typeagent/knowpro/convsettings.py +++ b/src/typeagent/knowpro/convsettings.py @@ -5,7 +5,8 @@ from dataclasses import dataclass -from ..aitools.embeddings import AsyncEmbeddingModel +from ..aitools.embeddings import IEmbeddingModel +from ..aitools.model_adapters import create_embedding_model from ..aitools.vectorbase import TextEmbeddingIndexSettings from .interfaces import IKnowledgeExtractor, IStorageProvider @@ -28,7 +29,7 @@ def __init__(self, embedding_index_settings: TextEmbeddingIndexSettings): @dataclass class SemanticRefIndexSettings: - batch_size: int + concurrency: int auto_extract_knowledge: bool knowledge_extractor: IKnowledgeExtractor | None = None @@ -38,11 +39,11 @@ class ConversationSettings: def __init__( self, - model: AsyncEmbeddingModel | None = None, + model: IEmbeddingModel | None = None, storage_provider: IStorageProvider | None = None, ): # All settings share the same model, so they share the embedding cache. 
- model = model or AsyncEmbeddingModel() + model = model or create_embedding_model() self.embedding_model = model min_score = 0.85 self.related_term_index_settings = RelatedTermIndexSettings( @@ -53,7 +54,7 @@ def __init__( TextEmbeddingIndexSettings(model, min_score=0.7) ) self.semantic_ref_index_settings = SemanticRefIndexSettings( - batch_size=4, # Effectively max concurrency + concurrency=4, auto_extract_knowledge=True, # The high-level API wants this ) diff --git a/src/typeagent/knowpro/fuzzyindex.py b/src/typeagent/knowpro/fuzzyindex.py index 6ace1b34..97138e6c 100644 --- a/src/typeagent/knowpro/fuzzyindex.py +++ b/src/typeagent/knowpro/fuzzyindex.py @@ -137,7 +137,7 @@ def deserialize(self, embeddings: NormalizedEmbedding) -> None: assert embeddings.dtype == np.float32, embeddings.dtype assert embeddings.ndim == 2, embeddings.shape assert ( - embeddings.shape[1] == self._vector_base._embedding_size + self._vector_base._embedding_size == 0 + or embeddings.shape[1] == self._vector_base._embedding_size ), embeddings.shape - self.clear() - self.push(embeddings) + self._vector_base.deserialize(embeddings) diff --git a/src/typeagent/knowpro/interfaces_core.py b/src/typeagent/knowpro/interfaces_core.py index 4e53f27c..87ef7329 100644 --- a/src/typeagent/knowpro/interfaces_core.py +++ b/src/typeagent/knowpro/interfaces_core.py @@ -4,6 +4,7 @@ from __future__ import annotations +from collections.abc import Sequence from datetime import datetime as Datetime from typing import ( Any, @@ -19,7 +20,7 @@ from pydantic.dataclasses import dataclass import typechat -from . import kplib +from . 
import knowledge_schema as kplib from .field_helpers import CamelCaseField __all__ = [ @@ -89,8 +90,9 @@ class IndexingStartPoints: class AddMessagesResult: """Result of add_messages_with_indexing operation.""" - messages_added: int - semrefs_added: int + messages_added: int = 0 + chunks_added: int = 0 + semrefs_added: int = 0 # Messages are referenced by their sequential ordinal numbers. @@ -128,6 +130,12 @@ class IMessage[TMetadata: IMessageMetadata](IKnowledgeSource, Protocol): # Metadata associated with the message such as its source. metadata: TMetadata | None = None + # Optional external identifier of the source this message was ingested from + # (e.g., an email ID, a file path, a URL). Used by ingestion pipelines to + # detect already-ingested sources for restartability. None means the message + # is not associated with an external source (e.g., synthesized in tests). + source_id: str | None = None + # Semantic references are also ordinal. type SemanticRefOrdinal = int @@ -168,6 +176,11 @@ async def add_term( semantic_ref_ordinal: SemanticRefOrdinal | ScoredSemanticRefOrdinal, ) -> str: ... + async def add_terms_batch( + self, + terms: Sequence[tuple[str, SemanticRefOrdinal | ScoredSemanticRefOrdinal]], + ) -> None: ... + async def remove_term( self, term: str, semantic_ref_ordinal: SemanticRefOrdinal ) -> None: ... 
@@ -249,32 +262,24 @@ def __repr__(self) -> str: else: return f"{self.__class__.__name__}({self.start}, {self.end})" + @staticmethod + def _effective_end(tr: "TextRange") -> tuple[int, int]: + """Return (message_ordinal, chunk_ordinal) for the effective end.""" + if tr.end is not None: + return (tr.end.message_ordinal, tr.end.chunk_ordinal) + return (tr.start.message_ordinal, tr.start.chunk_ordinal + 1) + def __eq__(self, other: object) -> bool: if not isinstance(other, TextRange): return NotImplemented - if self.start != other.start: return False - - # Get the effective end for both ranges - self_end = self.end or TextLocation( - self.start.message_ordinal, self.start.chunk_ordinal + 1 - ) - other_end = other.end or TextLocation( - other.start.message_ordinal, other.start.chunk_ordinal + 1 - ) - return self_end == other_end + return TextRange._effective_end(self) == TextRange._effective_end(other) def __lt__(self, other: Self) -> bool: if self.start != other.start: return self.start < other.start - self_end = self.end or TextLocation( - self.start.message_ordinal, self.start.chunk_ordinal + 1 - ) - other_end = other.end or TextLocation( - other.start.message_ordinal, other.start.chunk_ordinal + 1 - ) - return self_end < other_end + return TextRange._effective_end(self) < TextRange._effective_end(other) def __gt__(self, other: Self) -> bool: return other.__lt__(self) @@ -286,13 +291,9 @@ def __le__(self, other: Self) -> bool: return not other.__lt__(self) def __contains__(self, other: Self) -> bool: - other_end = other.end or TextLocation( - other.start.message_ordinal, other.start.chunk_ordinal + 1 - ) - self_end = self.end or TextLocation( - self.start.message_ordinal, self.start.chunk_ordinal + 1 - ) - return self.start <= other.start and other_end <= self_end + if not (self.start <= other.start): + return False + return TextRange._effective_end(other) <= TextRange._effective_end(self) def serialize(self) -> TextRangeData: return 
self.__pydantic_serializer__.to_python( # type: ignore diff --git a/src/typeagent/knowpro/interfaces_indexes.py b/src/typeagent/knowpro/interfaces_indexes.py index a894ab88..c872fa7c 100644 --- a/src/typeagent/knowpro/interfaces_indexes.py +++ b/src/typeagent/knowpro/interfaces_indexes.py @@ -59,6 +59,13 @@ async def add_property( semantic_ref_ordinal: SemanticRefOrdinal | ScoredSemanticRefOrdinal, ) -> None: ... + async def add_properties_batch( + self, + properties: Sequence[ + tuple[str, str, SemanticRefOrdinal | ScoredSemanticRefOrdinal] + ], + ) -> None: ... + async def lookup_property( self, property_name: str, value: str ) -> list[ScoredSemanticRefOrdinal] | None: ... diff --git a/src/typeagent/knowpro/interfaces_search.py b/src/typeagent/knowpro/interfaces_search.py index 4ff20c7f..c3727d21 100644 --- a/src/typeagent/knowpro/interfaces_search.py +++ b/src/typeagent/knowpro/interfaces_search.py @@ -18,13 +18,14 @@ ) __all__ = [ - "SearchTerm", "KnowledgePropertyName", "PropertySearchTerm", + "SearchSelectExpr", + "SearchTerm", "SearchTermGroup", "SearchTermGroupTypes", + "SemanticRefSearchResult", "WhenFilter", - "SearchSelectExpr", ] @@ -142,15 +143,3 @@ class SemanticRefSearchResult: term_matches: set[str] semantic_ref_matches: list[ScoredSemanticRefOrdinal] - - -__all__ = [ - "KnowledgePropertyName", - "PropertySearchTerm", - "SearchSelectExpr", - "SearchTerm", - "SearchTermGroup", - "SearchTermGroupTypes", - "SemanticRefSearchResult", - "WhenFilter", -] diff --git a/src/typeagent/knowpro/interfaces_storage.py b/src/typeagent/knowpro/interfaces_storage.py index a19834e6..c0450f29 100644 --- a/src/typeagent/knowpro/interfaces_storage.py +++ b/src/typeagent/knowpro/interfaces_storage.py @@ -6,16 +6,18 @@ from collections.abc import AsyncIterable, Iterable from datetime import datetime as Datetime -from typing import Any, Protocol, Self +from typing import Any, NamedTuple, Protocol, Self from pydantic.dataclasses import dataclass from .interfaces_core import 
( IMessage, ITermToSemanticRefIndex, + KnowledgeType, MessageOrdinal, SemanticRef, SemanticRefOrdinal, + TextRange, ) from .interfaces_indexes import ( IConversationSecondaryIndexes, @@ -52,12 +54,34 @@ class ConversationMetadata: schema_version: int | None = None created_at: Datetime | None = None updated_at: Datetime | None = None - embedding_size: int | None = None embedding_model: str | None = None tags: list[str] | None = None extra: dict[str, str] | None = None +class SemanticRefMetadata(NamedTuple): + """Lightweight metadata for filtering without full knowledge deserialization.""" + + ordinal: SemanticRefOrdinal + range: TextRange + knowledge_type: KnowledgeType + + +@dataclass +class ChunkFailure: + """Record of a single failed knowledge-extraction attempt for one chunk. + + Stored in the storage provider so that ingestion pipelines can retry just + the failed chunks without re-processing whole messages. + """ + + message_ordinal: int + chunk_ordinal: int + error_class: str + error_message: str + failed_at: Datetime + + class IReadonlyCollection[T, TOrdinal](AsyncIterable[T], Protocol): async def size(self) -> int: ... @@ -92,6 +116,12 @@ class IMessageCollection[TMessage: IMessage]( class ISemanticRefCollection(ICollection[SemanticRef, SemanticRefOrdinal], Protocol): """A collection of SemanticRefs.""" + async def get_metadata_multiple( + self, ordinals: list[SemanticRefOrdinal] + ) -> list[SemanticRefMetadata]: + """Batch-fetch lightweight metadata without deserializing knowledge.""" + ... + class IStorageProvider[TMessage: IMessage](Protocol): """API spec for storage providers -- maybe in-memory or persistent.""" @@ -116,11 +146,11 @@ async def get_conversation_threads(self) -> IConversationThreads: ... # Metadata management - def get_conversation_metadata(self) -> ConversationMetadata: + async def get_conversation_metadata(self) -> ConversationMetadata: """Get conversation metadata (missing fields set to None).""" ... 
- def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: + async def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: """Set or update conversation metadata key-value pairs. Args: **kwds: Metadata keys and values where: @@ -130,7 +160,7 @@ def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: """ ... - def update_conversation_timestamps( + async def update_conversation_timestamps( self, created_at: Datetime | None = None, updated_at: Datetime | None = None, @@ -139,20 +169,47 @@ def update_conversation_timestamps( ... # Ingested source tracking - def is_source_ingested(self, source_id: str) -> bool: + async def is_source_ingested(self, source_id: str) -> bool: """Check if a source has already been ingested.""" ... - def get_source_status(self, source_id: str) -> str | None: + async def get_source_status(self, source_id: str) -> str | None: """Get the ingestion status of a source.""" ... - def mark_source_ingested( + async def mark_source_ingested( self, source_id: str, status: str = STATUS_INGESTED ) -> None: """Mark a source as ingested (no commit; call within transaction context).""" ... + # Chunk-level extraction failure tracking + + async def record_chunk_failure( + self, + message_ordinal: int, + chunk_ordinal: int, + error_class: str, + error_message: str, + ) -> None: + """Record an extraction failure for a single chunk. + + Idempotent: re-recording overwrites any prior entry for the same + (message_ordinal, chunk_ordinal). No commit; call within transaction + context. + """ + ... + + async def clear_chunk_failure( + self, message_ordinal: int, chunk_ordinal: int + ) -> None: + """Remove the failure record for one chunk (e.g., after a retry succeeds).""" + ... + + async def get_chunk_failures(self) -> list[ChunkFailure]: + """Return all recorded chunk failures, ordered by message and chunk.""" + ... + # Transaction management async def __aenter__(self) -> Self: """Enter transaction context. 
Calls begin_transaction().""" @@ -183,6 +240,7 @@ class IConversation[ __all__ = [ + "ChunkFailure", "ConversationMetadata", "ICollection", "IConversation", @@ -191,4 +249,5 @@ class IConversation[ "ISemanticRefCollection", "IStorageProvider", "STATUS_INGESTED", + "SemanticRefMetadata", ] diff --git a/src/typeagent/knowpro/knowledge.py b/src/typeagent/knowpro/knowledge.py index bda7397f..e2503967 100644 --- a/src/typeagent/knowpro/knowledge.py +++ b/src/typeagent/knowpro/knowledge.py @@ -7,7 +7,9 @@ from typechat import Result, TypeChatLanguageModel -from . import convknowledge, kplib +from . import convknowledge +from . import knowledge_schema as kplib +from ..aitools import model_adapters from .interfaces import IKnowledgeExtractor @@ -15,7 +17,7 @@ def create_knowledge_extractor( chat_model: TypeChatLanguageModel | None = None, ) -> convknowledge.KnowledgeExtractor: """Create a knowledge extractor using the given Chat Model.""" - chat_model = chat_model or convknowledge.create_typechat_model() + chat_model = chat_model or model_adapters.create_chat_model() extractor = convknowledge.KnowledgeExtractor( chat_model, max_chars_per_chunk=4096, merge_action_knowledge=False ) @@ -25,7 +27,6 @@ def create_knowledge_extractor( async def extract_knowledge_from_text( knowledge_extractor: IKnowledgeExtractor, text: str, - max_retries: int, ) -> Result[kplib.KnowledgeResponse]: """Extract knowledge from a single text input with retries.""" # TODO: Add a retry mechanism to handle transient errors. 
@@ -36,21 +37,17 @@ async def batch_worker( q: asyncio.Queue[tuple[int, str] | None], knowledge_extractor: IKnowledgeExtractor, results: dict[int, Result[kplib.KnowledgeResponse]], - max_retries: int, ) -> None: while item := await q.get(): index, text = item - result = await extract_knowledge_from_text( - knowledge_extractor, text, max_retries - ) + result = await extract_knowledge_from_text(knowledge_extractor, text) results[index] = result async def extract_knowledge_from_text_batch( knowledge_extractor: IKnowledgeExtractor, text_batch: list[str], - concurrency: int = 2, - max_retries: int = 3, + concurrency: int = 4, ) -> list[Result[kplib.KnowledgeResponse]]: """Extract knowledge from a batch of text inputs concurrently.""" if not text_batch: @@ -63,7 +60,7 @@ async def extract_knowledge_from_text_batch( async with asyncio.TaskGroup() as tg: for _ in range(concurrency): - tg.create_task(batch_worker(q, knowledge_extractor, results, max_retries)) + tg.create_task(batch_worker(q, knowledge_extractor, results)) for index, text in enumerate(text_batch): await q.put((index, text)) @@ -202,7 +199,6 @@ async def extract_knowledge_for_text_batch_q( knowledge_extractor: convknowledge.KnowledgeExtractor, text_batch: list[str], concurrency: int = 2, - max_retries: int = 3, ) -> list[Result[kplib.KnowledgeResponse]]: """Extract knowledge for a batch of text inputs using a task queue.""" raise NotImplementedError("TODO") @@ -211,7 +207,7 @@ async def extract_knowledge_for_text_batch_q( # await run_in_batches( # task_batch, - # lambda text: extract_knowledge_from_text(knowledge_extractor, text, max_retries), + # lambda text: extract_knowledge_from_text(knowledge_extractor, text), # concurrency, # ) diff --git a/src/typeagent/knowpro/kplib.py b/src/typeagent/knowpro/knowledge_schema.py similarity index 100% rename from src/typeagent/knowpro/kplib.py rename to src/typeagent/knowpro/knowledge_schema.py diff --git a/src/typeagent/knowpro/messageutils.py 
b/src/typeagent/knowpro/messageutils.py index bd7cf879..6b4afb6e 100644 --- a/src/typeagent/knowpro/messageutils.py +++ b/src/typeagent/knowpro/messageutils.py @@ -5,7 +5,6 @@ from .interfaces import ( IMessage, - IMessageCollection, MessageOrdinal, TextLocation, TextRange, @@ -23,90 +22,28 @@ def text_range_from_message_chunk( ) -async def get_message_chunk_batch[TMessage: IMessage]( - messages: IMessageCollection[TMessage], - message_ordinal_start_at: MessageOrdinal, - batch_size: int, -) -> list[list[TextLocation]]: - """ - Get batches of message chunk locations for processing. - - Args: - messages: Collection of messages to process - message_ordinal_start_at: Starting message ordinal - batch_size: Number of message chunks per batch - - Yields: - Lists of TextLocation objects, each representing a message chunk - """ - batches: list[list[TextLocation]] = [] - current_batch: list[TextLocation] = [] - - message_ordinal = message_ordinal_start_at - async for message in messages: - if message_ordinal < message_ordinal_start_at: - message_ordinal += 1 - continue - - # Process each text chunk in the message - for chunk_ordinal in range(len(message.text_chunks)): - text_location = TextLocation( - message_ordinal=message_ordinal, - chunk_ordinal=chunk_ordinal, - ) - current_batch.append(text_location) - - # When batch is full, yield it and start a new one - if len(current_batch) >= batch_size: - batches.append(current_batch) - current_batch = [] - - message_ordinal += 1 - - # Don't forget the last batch if it has items - if current_batch: - batches.append(current_batch) - - return batches - - -def get_message_chunk_batch_from_list[TMessage: IMessage]( +def get_all_message_chunk_locations[TMessage: IMessage]( messages: list[TMessage], message_ordinal_start_at: MessageOrdinal, - batch_size: int, -) -> list[list[TextLocation]]: +) -> list[TextLocation]: """ - Get batches of message chunk locations for processing from a list of messages. 
+ Get a flat list of all message chunk locations from a list of messages. Args: messages: List of messages to process message_ordinal_start_at: Starting message ordinal (ordinal of first message in list) - batch_size: Number of message chunks per batch Returns: - Lists of TextLocation objects, each representing a message chunk + Flat list of TextLocation objects, one per message chunk """ - batches: list[list[TextLocation]] = [] - current_batch: list[TextLocation] = [] - + locations: list[TextLocation] = [] for idx, message in enumerate(messages): message_ordinal = message_ordinal_start_at + idx - - # Process each text chunk in the message for chunk_ordinal in range(len(message.text_chunks)): - text_location = TextLocation( - message_ordinal=message_ordinal, - chunk_ordinal=chunk_ordinal, + locations.append( + TextLocation( + message_ordinal=message_ordinal, + chunk_ordinal=chunk_ordinal, + ) ) - current_batch.append(text_location) - - # When batch is full, yield it and start a new one - if len(current_batch) >= batch_size: - batches.append(current_batch) - current_batch = [] - - # Don't forget the last batch if it has items - if current_batch: - batches.append(current_batch) - - return batches + return locations diff --git a/src/typeagent/knowpro/query.py b/src/typeagent/knowpro/query.py index 3fc471db..0d519bdb 100644 --- a/src/typeagent/knowpro/query.py +++ b/src/typeagent/knowpro/query.py @@ -37,6 +37,7 @@ ScoredSemanticRefOrdinal, SearchTerm, SemanticRef, + SemanticRefMetadata, SemanticRefOrdinal, SemanticRefSearchResult, Term, @@ -44,7 +45,8 @@ TextRange, Thread, ) -from .kplib import ConcreteEntity +from .knowledge_schema import ConcreteEntity +from .utils import aenumerate # TODO: Move to compilelib.py type BooleanOp = Literal["and", "or", "or_max"] @@ -101,11 +103,14 @@ async def get_text_range_for_date_range( messages = conversation.messages range_start_ordinal: MessageOrdinal = -1 range_end_ordinal = range_start_ordinal - async for message in messages: - 
if Datetime.fromisoformat(message.timestamp) in date_range: + async for ordinal, message in aenumerate(messages): + if ( + message.timestamp + and Datetime.fromisoformat(message.timestamp) in date_range + ): if range_start_ordinal < 0: - range_start_ordinal = message.ordinal - range_end_ordinal = message.ordinal + range_start_ordinal = ordinal + range_end_ordinal = ordinal else: if range_start_ordinal >= 0: # We have a range, so break. @@ -170,17 +175,14 @@ async def lookup_term_filtered( semantic_ref_index: ITermToSemanticRefIndex, term: Term, semantic_refs: ISemanticRefCollection, - filter: Callable[[SemanticRef, ScoredSemanticRefOrdinal], bool], + filter: Callable[[SemanticRefMetadata, ScoredSemanticRefOrdinal], bool], ) -> list[ScoredSemanticRefOrdinal] | None: """Look up a term in the semantic reference index and filter the results.""" scored_refs = await semantic_ref_index.lookup_term(term.text) if scored_refs: - filtered = [] - for sr in scored_refs: - semantic_ref = await semantic_refs.get_item(sr.semantic_ref_ordinal) - if filter(semantic_ref, sr): - filtered.append(sr) - return filtered + ordinals = [sr.semantic_ref_ordinal for sr in scored_refs] + metadata = await semantic_refs.get_metadata_multiple(ordinals) + return [sr for sr, m in zip(scored_refs, metadata) if filter(m, sr)] return None @@ -198,10 +200,8 @@ async def lookup_term( semantic_ref_index, term, semantic_refs, - lambda sr, _: ( - not knowledge_type or sr.knowledge.knowledge_type == knowledge_type - ) - and ranges_in_scope.is_range_in_scope(sr.range), + lambda m, _: (not knowledge_type or m.knowledge_type == knowledge_type) + and ranges_in_scope.is_range_in_scope(m.range), ) return await semantic_ref_index.lookup_term(term.text) @@ -696,7 +696,7 @@ class WhereSemanticRefExpr(QueryOpExpr[SemanticRefAccumulator]): async def eval(self, context: QueryEvalContext) -> SemanticRefAccumulator: accumulator = await self.source_expr.eval(context) - filtered = 
SemanticRefAccumulator(accumulator.search_term_matches) + filtered = SemanticRefAccumulator(set(accumulator.search_term_matches)) # Filter matches asynchronously filtered_matches = [] @@ -842,7 +842,7 @@ def text_ranges_from_message_ordinals( # TODO: Move to messagelib.py def text_range_from_message(message_ordinal: MessageOrdinal) -> TextRange: - return TextRange(start=TextLocation(message_ordinal)) + return TextRange(TextLocation(message_ordinal)) # TODO: ThreadSelector @@ -907,7 +907,7 @@ async def eval(self, context: QueryEvalContext) -> MessageAccumulator: ) if len(message_ordinals) == len(matches): matches.clear_matches() - ranked_messages = message_index.lookup_in_subset_by_embedding( + ranked_messages = await message_index.lookup_in_subset_by_embedding( self.embedding, message_ordinals, self.max_messages, diff --git a/src/typeagent/knowpro/search.py b/src/typeagent/knowpro/search.py index b7c37ac7..bfc94459 100644 --- a/src/typeagent/knowpro/search.py +++ b/src/typeagent/knowpro/search.py @@ -24,7 +24,7 @@ Term, WhenFilter, ) -from .kplib import ConcreteEntity +from .knowledge_schema import ConcreteEntity from .query import ( BooleanOp, CompiledSearchTerm, @@ -90,11 +90,9 @@ class SearchOptions: def __repr__(self): parts = [] - for key in dir(self): - if not key.startswith("_"): - value = getattr(self, key) - if value is not None: - parts.append(f"{key}={value!r}") + for key, value in vars(self).items(): + if not key.startswith("_") and value is not None: + parts.append(f"{key}={value!r}") return f"{self.__class__.__name__}({', '.join(parts)})" diff --git a/src/typeagent/knowpro/searchlang.py b/src/typeagent/knowpro/searchlang.py index dbb8092a..e2e990de 100644 --- a/src/typeagent/knowpro/searchlang.py +++ b/src/typeagent/knowpro/searchlang.py @@ -83,11 +83,9 @@ class LanguageSearchOptions(SearchOptions): def __repr__(self): parts = [] - for key in dir(self): - if not key.startswith("_"): - value = getattr(self, key) - if value is not None: - 
parts.append(f"{key}={value!r}") + for key, value in vars(self).items(): + if not key.startswith("_") and value is not None: + parts.append(f"{key}={value!r}") return f"{self.__class__.__name__}({', '.join(parts)})" @@ -371,6 +369,9 @@ def compile_action_term_as_search_terms( self.compile_entity_terms_as_search_terms( action_term.additional_entities, action_group ) + # only append the nested or_max wrapper when created one (use_or_max) and it's non-empty. + if use_or_max and action_group.terms: + term_group.terms.append(action_group) return term_group def compile_search_terms( @@ -609,21 +610,6 @@ def add_entity_name_to_group( exact_match_value, ) - def add_search_term_to_groupadd_entity_name_to_group( - self, - entity_term: EntityTerm, - property_name: PropertyNames, - term_group: SearchTermGroup, - exact_match_value: bool = False, - ) -> None: - if not entity_term.is_name_pronoun: - self.add_property_term_to_group( - property_name.value, - entity_term.name, - term_group, - exact_match_value, - ) - def add_property_term_to_group( self, property_name: str, diff --git a/src/typeagent/knowpro/serialization.py b/src/typeagent/knowpro/serialization.py index 1e48b68c..60cad6b0 100644 --- a/src/typeagent/knowpro/serialization.py +++ b/src/typeagent/knowpro/serialization.py @@ -24,7 +24,7 @@ from pydantic.alias_generators import to_camel -from . import kplib +from . 
import knowledge_schema as kplib from ..aitools.embeddings import NormalizedEmbeddings from .interfaces import ConversationDataWithIndexes, SearchTermGroupTypes, Tag, Topic @@ -46,9 +46,14 @@ def create_file_header() -> FileHeader: return FileHeader(version="0.1") +class ModelMetadata(TypedDict): + embeddingSize: int + + class EmbeddingFileHeader(TypedDict): relatedCount: NotRequired[int | None] messageCount: NotRequired[int | None] + modelMetadata: NotRequired[ModelMetadata | None] class EmbeddingData(TypedDict): @@ -104,6 +109,7 @@ def to_conversation_file_data[TMessageData]( embedding_file_header = EmbeddingFileHeader() embeddings_list: list[NormalizedEmbeddings] = [] + embedding_size = 0 related_terms_index_data = conversation_data.get("relatedTermsIndexData") if related_terms_index_data is not None: @@ -114,6 +120,8 @@ def to_conversation_file_data[TMessageData]( embeddings_list.append(embeddings) text_embedding_data["embeddings"] = None embedding_file_header["relatedCount"] = len(embeddings) + if embedding_size == 0 and embeddings.ndim == 2: + embedding_size = embeddings.shape[1] message_index_data = conversation_data.get("messageIndexData") if message_index_data is not None: @@ -124,6 +132,13 @@ def to_conversation_file_data[TMessageData]( embeddings_list.append(embeddings) text_embedding_data["embeddings"] = None embedding_file_header["messageCount"] = len(embeddings) + if embedding_size == 0 and embeddings.ndim == 2: + embedding_size = embeddings.shape[1] + + if embedding_size > 0: + embedding_file_header["modelMetadata"] = ModelMetadata( + embeddingSize=embedding_size + ) binary_data = ConversationBinaryData(embeddingsList=embeddings_list) json_data = ConversationJsonData( diff --git a/src/typeagent/knowpro/universal_message.py b/src/typeagent/knowpro/universal_message.py index fe4ebfa4..c5008fe2 100644 --- a/src/typeagent/knowpro/universal_message.py +++ b/src/typeagent/knowpro/universal_message.py @@ -8,7 +8,7 @@ from pydantic import AliasChoices, Field 
-from . import kplib +from . import knowledge_schema as kplib from .dataclasses import dataclass as pydantic_dataclass from .field_helpers import CamelCaseField from .interfaces import IKnowledgeSource, IMessage, IMessageMetadata @@ -204,6 +204,11 @@ class ConversationMessage(IMessage): Format: "2024-01-01T12:34:56Z" or "1970-01-01T00:01:23Z" (epoch-based) MUST include "Z" suffix to explicitly indicate UTC timezone. """ + source_id: str | None = None + """ + Optional external identifier of the source this message was ingested from + (e.g., a transcript file path or podcast episode id). See ``IMessage.source_id``. + """ def get_knowledge(self) -> kplib.KnowledgeResponse: return self.metadata.get_knowledge() diff --git a/src/typeagent/knowpro/utils.py b/src/typeagent/knowpro/utils.py index 298c09db..92eedacc 100644 --- a/src/typeagent/knowpro/utils.py +++ b/src/typeagent/knowpro/utils.py @@ -3,9 +3,18 @@ """Utility functions for the knowpro package.""" +from collections.abc import AsyncIterable + from .interfaces import MessageOrdinal, TextLocation, TextRange +async def aenumerate[T](aiterable: AsyncIterable[T], start: int = 0): + i = start + async for item in aiterable: + yield i, item + i += 1 + + def text_range_from_message_chunk( message_ordinal: MessageOrdinal, chunk_ordinal: int = 0, diff --git a/src/typeagent/mcp/server.py b/src/typeagent/mcp/server.py index 608137a6..8fb4d03e 100644 --- a/src/typeagent/mcp/server.py +++ b/src/typeagent/mcp/server.py @@ -3,14 +3,17 @@ """Fledgling MCP server on top of typeagent.""" - import argparse from dataclasses import dataclass import os import time from typing import Any -import coverage +try: + import coverage +except ImportError: + coverage = None # type: ignore[assignment] +from dotenv import load_dotenv from mcp.server.fastmcp import Context, FastMCP from mcp.server.session import ServerSession @@ -18,7 +21,8 @@ import typechat # Enable coverage.py before local imports (a no-op unless COVERAGE_PROCESS_START is set). 
-coverage.process_startup() +if coverage is not None: + coverage.process_startup() from typeagent.aitools import embeddings, utils from typeagent.knowpro import answers, query, searchlang @@ -102,7 +106,7 @@ class ProcessingContext: query_context: query.QueryEvalContext[ podcast.PodcastMessage, TermToSemanticRefIndex ] - embedding_model: embeddings.AsyncEmbeddingModel + embedding_model: embeddings.IEmbeddingModel query_translator: typechat.TypeChatJsonTranslator[SearchQuery] answer_translator: typechat.TypeChatJsonTranslator[AnswerResponse] @@ -246,12 +250,18 @@ async def query_conversation( return QuestionResponse( success=True, answer=combined_answer.answer or "", time_used=dt ) + case _: + return QuestionResponse( + success=False, + answer=f"Unexpected answer type: {combined_answer.type}", + time_used=dt, + ) # Run the MCP server if __name__ == "__main__": # Load env vars - utils.load_dotenv() + load_dotenv() # Set up command-line argument parsing and parse command line parser = argparse.ArgumentParser(description="MCP server for knowpro") diff --git a/src/typeagent/podcasts/podcast.py b/src/typeagent/podcasts/podcast.py index 3ed2639a..5376d20e 100644 --- a/src/typeagent/podcasts/podcast.py +++ b/src/typeagent/podcasts/podcast.py @@ -143,13 +143,19 @@ async def deserialize( @staticmethod def _read_conversation_data_from_file( - filename_prefix: str, embedding_size: int + filename_prefix: str, ) -> ConversationDataWithIndexes[Any]: """Read podcast conversation data from files. 
No exceptions are caught; they just bubble out.""" with open(filename_prefix + "_data.json", "r", encoding="utf-8") as f: json_data: serialization.ConversationJsonData[PodcastMessageData] = ( json.load(f) ) + embedding_file_header = json_data.get("embeddingFileHeader") + embedding_size = 0 + if embedding_file_header: + model_metadata = embedding_file_header.get("modelMetadata") + if model_metadata: + embedding_size = model_metadata.get("embeddingSize", 0) embeddings_list: list[NormalizedEmbeddings] | None = None if embedding_size: with open(filename_prefix + "_embeddings.bin", "rb") as f: @@ -159,7 +165,7 @@ def _read_conversation_data_from_file( embeddings_list = [embeddings] else: print( - "Warning: not reading embeddings file because size is {embedding_size}" + f"Warning: not reading embeddings file because size is {embedding_size}" ) embeddings_list = None file_data = serialization.ConversationFileData( @@ -178,10 +184,7 @@ async def read_from_file( settings: ConversationSettings, dbname: str | None = None, ) -> "Podcast": - embedding_size = settings.embedding_model.embedding_size - data = Podcast._read_conversation_data_from_file( - filename_prefix, embedding_size - ) + data = Podcast._read_conversation_data_from_file(filename_prefix) provider = await settings.get_storage_provider() msgs = await provider.get_message_collection() diff --git a/src/typeagent/podcasts/podcast_ingest.py b/src/typeagent/podcasts/podcast_ingest.py index bf3e3e21..d2de7c82 100644 --- a/src/typeagent/podcasts/podcast_ingest.py +++ b/src/typeagent/podcasts/podcast_ingest.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
+from collections.abc import AsyncIterator from datetime import timedelta import os import re @@ -8,6 +9,7 @@ from ..knowpro.convsettings import ConversationSettings from ..knowpro.interfaces import Datetime +from ..knowpro.interfaces_core import AddMessagesResult from ..knowpro.universal_message import format_timestamp_utc, UNIX_EPOCH from ..storage.utils import create_storage_provider from .podcast import Podcast, PodcastMessage, PodcastMessageMeta @@ -22,6 +24,7 @@ async def ingest_podcast( dbname: str | None = None, batch_size: int = 0, start_message: int = 0, + concurrency: int = 0, verbose: bool = False, ) -> Podcast: """ @@ -37,8 +40,10 @@ async def ingest_podcast( date is unknown (Unix "timestamp left at zero" convention). length_minutes: Total length of podcast in minutes (for proportional timestamp allocation) dbname: Database name or None (to use in-memory non-persistent storage) - batch_size: Number of messages to index per batch (default all messages) + batch_size: Number of messages per call to add_messages_with_indexing + (default: all messages at once). Used for recoverability on crash. 
start_message: Number of initial messages to skip (for resuming interrupted ingests) + concurrency: Max concurrent knowledge extractions (0 = use settings default) verbose: Whether to print progress information (default False) Returns: @@ -121,19 +126,46 @@ async def ingest_podcast( tags=[podcast_name], ) - # Add messages with indexing to build embeddings, using batch_size - batch_size = batch_size or len(msgs) - for i in range(start_message, len(msgs), batch_size): - batch = msgs[i : i + batch_size] - t0 = time.time() - await pod.add_messages_with_indexing(batch) - t1 = time.time() + # Set source_id on each message for restartability + for i, msg in enumerate(msgs): + msg.source_id = f"{transcript_file_path}#{i}" + + # Add messages using the streaming API (commit-per-batch) + if concurrency: + settings.semantic_ref_index_settings.concurrency = concurrency + + async def _message_stream() -> AsyncIterator[PodcastMessage]: + for msg in msgs[start_message:]: + yield msg + + cumulative_messages = 0 + t0 = time.time() + + def _on_batch_committed(result: AddMessagesResult) -> None: + nonlocal cumulative_messages + batch_start = cumulative_messages + cumulative_messages += result.messages_added if verbose: print( - f"Indexed messages {i} to {i + len(batch) - 1} " - f"in {t1 - t0:.1f} seconds." + f"Indexed messages {batch_start}-{cumulative_messages - 1} " + f"({result.chunks_added} chunks, {result.semrefs_added} semrefs) " + f"at t={time.time() - t0:.1f} seconds." ) + batch_size = batch_size or len(msgs) + result = await pod.add_messages_streaming( + _message_stream(), + batch_size=batch_size, + on_batch_committed=_on_batch_committed, + ) + t1 = time.time() + if verbose: + print( + f"Indexed {result.messages_added} messages " + f"({result.chunks_added} chunks, {result.semrefs_added} semrefs) " + f"in {t1 - t0:.1f} seconds." 
+ ) + return pod diff --git a/src/typeagent/storage/memory/collections.py b/src/typeagent/storage/memory/collections.py index 9973a290..8a5b14eb 100644 --- a/src/typeagent/storage/memory/collections.py +++ b/src/typeagent/storage/memory/collections.py @@ -10,6 +10,7 @@ IMessage, MessageOrdinal, SemanticRef, + SemanticRefMetadata, SemanticRefOrdinal, ) @@ -63,6 +64,18 @@ async def extend(self, items: Iterable[T]) -> None: class MemorySemanticRefCollection(MemoryCollection[SemanticRef, SemanticRefOrdinal]): """A collection of semantic references.""" + async def get_metadata_multiple( + self, ordinals: list[SemanticRefOrdinal] + ) -> list[SemanticRefMetadata]: + return [ + SemanticRefMetadata( + ordinal=o, + range=self.items[o].range, + knowledge_type=self.items[o].knowledge.knowledge_type, + ) + for o in ordinals + ] + class MemoryMessageCollection[TMessage: IMessage]( MemoryCollection[TMessage, MessageOrdinal] diff --git a/src/typeagent/storage/memory/messageindex.py b/src/typeagent/storage/memory/messageindex.py index a56da9ef..8d742794 100644 --- a/src/typeagent/storage/memory/messageindex.py +++ b/src/typeagent/storage/memory/messageindex.py @@ -40,7 +40,7 @@ async def build_message_index[ class IMessageTextEmbeddingIndex(IMessageTextIndex): async def generate_embedding(self, text: str) -> NormalizedEmbedding: ... - def lookup_by_embedding( + async def lookup_by_embedding( self, text_embedding: NormalizedEmbedding, max_matches: int | None = None, @@ -48,7 +48,7 @@ def lookup_by_embedding( predicate: Callable[[MessageOrdinal], bool] | None = None, ) -> list[ScoredMessageOrdinal]: ... - def lookup_in_subset_by_embedding( + async def lookup_in_subset_by_embedding( self, text_embedding: NormalizedEmbedding, ordinals_to_search: list[MessageOrdinal], @@ -128,7 +128,7 @@ async def generate_embedding(self, text: str) -> NormalizedEmbedding: # TODO: Find a prettier API to get an embedding rather than using _vector_base? 
return await self.text_location_index.generate_embedding(text) - def lookup_in_subset_by_embedding( + async def lookup_in_subset_by_embedding( self, text_embedding: NormalizedEmbedding, ordinals_to_search: list[MessageOrdinal], diff --git a/src/typeagent/storage/memory/propindex.py b/src/typeagent/storage/memory/propindex.py index c35d140d..ecb3e85d 100644 --- a/src/typeagent/storage/memory/propindex.py +++ b/src/typeagent/storage/memory/propindex.py @@ -1,10 +1,11 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. +from collections.abc import Sequence import enum from typing import assert_never -from ...knowpro import kplib +from ...knowpro import knowledge_schema as kplib from ...knowpro.collections import TextRangesInScope from ...knowpro.interfaces import ( IConversation, @@ -109,6 +110,63 @@ async def build_property_index(conversation: IConversation) -> None: await add_to_property_index(conversation, 0) +def collect_facet_properties( + facet: kplib.Facet | None, + ordinal: SemanticRefOrdinal, +) -> list[tuple[str, str, SemanticRefOrdinal]]: + """Collect property tuples from a facet without touching any index.""" + if facet is None: + return [] + props: list[tuple[str, str, SemanticRefOrdinal]] = [ + (PropertyNames.FacetName.value, facet.name, ordinal) + ] + value = facet.value + if value is not None: + if isinstance(value, float) and value: + value = f"{value:g}" + props.append((PropertyNames.FacetValue.value, str(value), ordinal)) + return props + + +def collect_entity_properties( + entity: kplib.ConcreteEntity, + ordinal: SemanticRefOrdinal, +) -> list[tuple[str, str, SemanticRefOrdinal]]: + """Collect all property tuples for an entity.""" + props: list[tuple[str, str, SemanticRefOrdinal]] = [ + (PropertyNames.EntityName.value, entity.name, ordinal) + ] + for t in entity.type: + props.append((PropertyNames.EntityType.value, t, ordinal)) + if entity.facets: + for facet in entity.facets: + props.extend(collect_facet_properties(facet, 
ordinal)) + return props + + +def collect_action_properties( + action: kplib.Action, + ordinal: SemanticRefOrdinal, +) -> list[tuple[str, str, SemanticRefOrdinal]]: + """Collect all property tuples for an action.""" + props: list[tuple[str, str, SemanticRefOrdinal]] = [ + (PropertyNames.Verb.value, " ".join(action.verbs), ordinal) + ] + if action.subject_entity_name != "none": + props.append((PropertyNames.Subject.value, action.subject_entity_name, ordinal)) + if action.object_entity_name != "none": + props.append((PropertyNames.Object.value, action.object_entity_name, ordinal)) + if action.indirect_object_entity_name != "none": + props.append( + ( + PropertyNames.IndirectObject.value, + action.indirect_object_entity_name, + ordinal, + ) + ) + return props + + async def add_to_property_index( conversation: IConversation, start_at_ordinal: SemanticRefOrdinal, @@ -127,29 +185,40 @@ async def add_to_property_index( semantic_refs = conversation.semantic_refs size = await semantic_refs.size() + collected: list[tuple[str, str, SemanticRefOrdinal]] = [] for semantic_ref_ordinal, semantic_ref in enumerate( await semantic_refs.get_slice(start_at_ordinal, size), start_at_ordinal, ): assert semantic_ref.semantic_ref_ordinal == semantic_ref_ordinal if isinstance(semantic_ref.knowledge, kplib.Action): - await add_action_properties_to_index( - semantic_ref.knowledge, property_index, semantic_ref_ordinal + collected.extend( + collect_action_properties( + semantic_ref.knowledge, semantic_ref_ordinal + ) ) elif isinstance(semantic_ref.knowledge, kplib.ConcreteEntity): - await add_entity_properties_to_index( - semantic_ref.knowledge, property_index, semantic_ref_ordinal + collected.extend( + collect_entity_properties( + semantic_ref.knowledge, semantic_ref_ordinal + ) ) elif isinstance(semantic_ref.knowledge, Tag): - tag = semantic_ref.knowledge - await property_index.add_property( - PropertyNames.Tag.value, tag.text, semantic_ref_ordinal + collected.append( + ( + 
PropertyNames.Tag.value, + semantic_ref.knowledge.text, + semantic_ref_ordinal, + ) ) elif isinstance(semantic_ref.knowledge, Topic): pass else: assert_never(semantic_ref.knowledge) + if collected: + await property_index.add_properties_batch(collected) + class PropertyIndex(IPropertyToSemanticRefIndex): def __init__(self): @@ -183,6 +252,15 @@ async def add_property( else: self._map[term_text] = [semantic_ref_ordinal] + async def add_properties_batch( + self, + properties: Sequence[ + tuple[str, str, SemanticRefOrdinal | ScoredSemanticRefOrdinal] + ], + ) -> None: + for name, value, ordinal in properties: + await self.add_property(name, value, ordinal) + async def clear(self) -> None: self._map = {} @@ -252,12 +330,13 @@ async def lookup_property_in_property_index( property_value, ) if ranges_in_scope is not None and scored_refs: - filtered_refs = [] - for sr in scored_refs: - semantic_ref = await semantic_refs.get_item(sr.semantic_ref_ordinal) - if ranges_in_scope.is_range_in_scope(semantic_ref.range): - filtered_refs.append(sr) - scored_refs = filtered_refs + ordinals = [sr.semantic_ref_ordinal for sr in scored_refs] + metadata = await semantic_refs.get_metadata_multiple(ordinals) + scored_refs = [ + sr + for sr, m in zip(scored_refs, metadata) + if ranges_in_scope.is_range_in_scope(m.range) + ] return scored_refs or None # Return None if no results diff --git a/src/typeagent/storage/memory/provider.py b/src/typeagent/storage/memory/provider.py index 83ef6ab7..603fbd24 100644 --- a/src/typeagent/storage/memory/provider.py +++ b/src/typeagent/storage/memory/provider.py @@ -3,11 +3,11 @@ """In-memory storage provider implementation.""" - -from datetime import datetime +from datetime import datetime, timezone from ...knowpro.convsettings import MessageTextIndexSettings, RelatedTermIndexSettings from ...knowpro.interfaces import ( + ChunkFailure, ConversationMetadata, IConversationThreads, IMessage, @@ -41,6 +41,7 @@ class MemoryStorageProvider[TMessage: 
IMessage](IStorageProvider[TMessage]): _related_terms_index: RelatedTermsIndex _conversation_threads: ConversationThreads _ingested_sources: set[str] + _chunk_failures: dict[tuple[int, int], ChunkFailure] def __init__( self, @@ -61,6 +62,7 @@ def __init__( thread_settings = message_text_settings.embedding_index_settings self._conversation_threads = ConversationThreads(thread_settings) self._ingested_sources = set() + self._chunk_failures = {} async def __aenter__(self) -> "MemoryStorageProvider[TMessage]": """Enter transaction context. No-op for in-memory storage.""" @@ -105,7 +107,7 @@ async def close(self) -> None: """Close the storage provider.""" pass - def get_conversation_metadata(self) -> ConversationMetadata: + async def get_conversation_metadata(self) -> ConversationMetadata: """Get conversation metadata. For in-memory storage, returns the metadata provided during initialization @@ -113,7 +115,7 @@ def get_conversation_metadata(self) -> ConversationMetadata: """ return self._metadata - def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: + async def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: """Set conversation metadata (no-op for in-memory storage). This method exists for API compatibility with SqliteStorageProvider @@ -124,7 +126,7 @@ def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: """ pass - def update_conversation_timestamps( + async def update_conversation_timestamps( self, created_at: datetime | None = None, updated_at: datetime | None = None, @@ -140,7 +142,7 @@ def update_conversation_timestamps( """ pass - def is_source_ingested(self, source_id: str) -> bool: + async def is_source_ingested(self, source_id: str) -> bool: """Check if a source has already been ingested. 
Args: @@ -151,7 +153,7 @@ def is_source_ingested(self, source_id: str) -> bool: """ return source_id in self._ingested_sources - def get_source_status(self, source_id: str) -> str | None: + async def get_source_status(self, source_id: str) -> str | None: """Get the ingestion status of a source. Args: @@ -164,7 +166,7 @@ def get_source_status(self, source_id: str) -> str | None: return STATUS_INGESTED return None - def mark_source_ingested( + async def mark_source_ingested( self, source_id: str, status: str = STATUS_INGESTED ) -> None: """Mark a source as ingested. @@ -173,3 +175,29 @@ def mark_source_ingested( source_id: External source identifier (email ID, file path, etc.) """ self._ingested_sources.add(source_id) + + async def record_chunk_failure( + self, + message_ordinal: int, + chunk_ordinal: int, + error_class: str, + error_message: str, + ) -> None: + """Record a knowledge-extraction failure for a single chunk.""" + self._chunk_failures[(message_ordinal, chunk_ordinal)] = ChunkFailure( + message_ordinal=message_ordinal, + chunk_ordinal=chunk_ordinal, + error_class=error_class, + error_message=error_message, + failed_at=datetime.now(timezone.utc), + ) + + async def clear_chunk_failure( + self, message_ordinal: int, chunk_ordinal: int + ) -> None: + """Remove a previously recorded chunk failure (no-op if absent).""" + self._chunk_failures.pop((message_ordinal, chunk_ordinal), None) + + async def get_chunk_failures(self) -> list[ChunkFailure]: + """Return all recorded chunk failures, ordered by (msg_ordinal, chunk_ordinal).""" + return [self._chunk_failures[k] for k in sorted(self._chunk_failures)] diff --git a/src/typeagent/storage/memory/semrefindex.py b/src/typeagent/storage/memory/semrefindex.py index ec44c876..8654e5a3 100644 --- a/src/typeagent/storage/memory/semrefindex.py +++ b/src/typeagent/storage/memory/semrefindex.py @@ -3,11 +3,13 @@ from __future__ import annotations # TODO: Avoid -from collections.abc import AsyncIterable, Callable +from 
collections.abc import AsyncIterable, Callable, Sequence from typechat import Failure -from ...knowpro import convknowledge, kplib, secindex +from ...knowpro import convknowledge +from ...knowpro import knowledge_schema as kplib +from ...knowpro import secindex from ...knowpro.convsettings import ConversationSettings, SemanticRefIndexSettings from ...knowpro.interfaces import ( # Interfaces.; Other imports. IConversation, @@ -24,26 +26,13 @@ TermToSemanticRefIndexData, TermToSemanticRefIndexItemData, TextLocation, - TextRange, Topic, ) from ...knowpro.knowledge import extract_knowledge_from_text_batch from ...knowpro.messageutils import ( - get_message_chunk_batch, text_range_from_message_chunk, ) - -def text_range_from_location( - message_ordinal: MessageOrdinal, - chunk_ordinal: int = 0, -) -> TextRange: - return TextRange( - start=TextLocation(message_ordinal, chunk_ordinal), - end=None, - ) - - type KnowledgeValidator = Callable[ [ KnowledgeType, # knowledge_type @@ -60,6 +49,7 @@ async def add_batch_to_semantic_ref_index[ batch: list[TextLocation], knowledge_extractor: IKnowledgeExtractor, terms_added: set[str] | None = None, + concurrency: int = 4, ) -> None: messages = conversation.messages @@ -73,7 +63,7 @@ async def add_batch_to_semantic_ref_index[ knowledge_results = await extract_knowledge_from_text_batch( knowledge_extractor, text_batch, - len(text_batch), + concurrency, ) for i, knowledge_result in enumerate(knowledge_results): if isinstance(knowledge_result, Failure): @@ -99,6 +89,7 @@ async def add_batch_to_semantic_ref_index_from_list[ batch: list[TextLocation], knowledge_extractor: IKnowledgeExtractor, terms_added: set[str] | None = None, + concurrency: int = 4, ) -> None: """ Add a batch of knowledge to semantic ref index, extracting from provided message list. 
@@ -131,7 +122,7 @@ async def add_batch_to_semantic_ref_index_from_list[ knowledge_results = await extract_knowledge_from_text_batch( knowledge_extractor, text_batch, - len(text_batch), + concurrency, ) for i, knowledge_result in enumerate(knowledge_results): if isinstance(knowledge_result, Failure): @@ -149,31 +140,6 @@ async def add_batch_to_semantic_ref_index_from_list[ ) -async def add_entity_to_index( - entity: kplib.ConcreteEntity, - semantic_refs: ISemanticRefCollection, - semantic_ref_index: ITermToSemanticRefIndex, - message_ordinal: MessageOrdinal, - chunk_ordinal: int = 0, -) -> None: - ref_ordinal = await semantic_refs.size() - await semantic_refs.append( - SemanticRef( - semantic_ref_ordinal=ref_ordinal, - range=text_range_from_location(message_ordinal, chunk_ordinal), - knowledge=entity, - ) - ) - await semantic_ref_index.add_term(entity.name, ref_ordinal) - # Add each type as a separate term. - for type in entity.type: - await semantic_ref_index.add_term(type, ref_ordinal) - # Add every facet name as a separate term. - if entity.facets: - for facet in entity.facets: - await add_facet(facet, ref_ordinal, semantic_ref_index) - - async def add_term_to_index( index: ITermToSemanticRefIndex, term: str, @@ -198,7 +164,7 @@ async def add_entity( semantic_refs: ISemanticRefCollection, semantic_ref_index: ITermToSemanticRefIndex, message_ordinal: MessageOrdinal, - chunk_ordinal: int, + chunk_ordinal: int = 0, terms_added: set[str] | None = None, ) -> None: """Add an entity to the semantic reference index. @@ -267,7 +233,7 @@ async def add_topic( semantic_refs: ISemanticRefCollection, semantic_ref_index: ITermToSemanticRefIndex, message_ordinal: MessageOrdinal, - chunk_ordinal: int, + chunk_ordinal: int = 0, terms_added: set[str] | None = None, ) -> None: """Add a topic to the semantic reference index. 
@@ -302,7 +268,7 @@ async def add_action( semantic_refs: ISemanticRefCollection, semantic_ref_index: ITermToSemanticRefIndex, message_ordinal: MessageOrdinal, - chunk_ordinal: int, + chunk_ordinal: int = 0, terms_added: set[str] | None = None, ) -> None: """Add an action to the semantic reference index. @@ -461,61 +427,6 @@ def validate_entity(entity: kplib.ConcreteEntity) -> bool: return bool(entity.name) -async def add_topic_to_index( - topic: Topic | str, - semantic_refs: ISemanticRefCollection, - semantic_ref_index: ITermToSemanticRefIndex, - message_ordinal: MessageOrdinal, - chunk_ordinal: int = 0, -) -> None: - if isinstance(topic, str): - topic = Topic(text=topic) - ref_ordinal = await semantic_refs.size() - await semantic_refs.append( - SemanticRef( - semantic_ref_ordinal=ref_ordinal, - range=text_range_from_location(message_ordinal, chunk_ordinal), - knowledge=topic, - ) - ) - await semantic_ref_index.add_term(topic.text, ref_ordinal) - - -async def add_action_to_index( - action: kplib.Action, - semantic_refs: ISemanticRefCollection, - semantic_ref_index: ITermToSemanticRefIndex, - message_ordinal: int, - chunk_ordinal: int = 0, -) -> None: - ref_ordinal = await semantic_refs.size() - await semantic_refs.append( - SemanticRef( - semantic_ref_ordinal=ref_ordinal, - range=text_range_from_location(message_ordinal, chunk_ordinal), - knowledge=action, - ) - ) - await semantic_ref_index.add_term(" ".join(action.verbs), ref_ordinal) - if action.subject_entity_name != "none": - await semantic_ref_index.add_term(action.subject_entity_name, ref_ordinal) - if action.object_entity_name != "none": - await semantic_ref_index.add_term(action.object_entity_name, ref_ordinal) - if action.indirect_object_entity_name != "none": - await semantic_ref_index.add_term( - action.indirect_object_entity_name, ref_ordinal - ) - if action.params: - for param in action.params: - if isinstance(param, str): - await semantic_ref_index.add_term(param, ref_ordinal) - else: - await 
semantic_ref_index.add_term(param.name, ref_ordinal) - if isinstance(param.value, str): - await semantic_ref_index.add_term(param.value, ref_ordinal) - await add_facet(action.subject_entity_facet, ref_ordinal, semantic_ref_index) - - async def add_knowledge_to_index( semantic_refs: ISemanticRefCollection, semantic_ref_index: ITermToSemanticRefIndex, @@ -523,20 +434,16 @@ async def add_knowledge_to_index( knowledge: kplib.KnowledgeResponse, ) -> None: for entity in knowledge.entities: - await add_entity_to_index( - entity, semantic_refs, semantic_ref_index, message_ordinal - ) + await add_entity(entity, semantic_refs, semantic_ref_index, message_ordinal) for action in knowledge.actions: - await add_action_to_index( - action, semantic_refs, semantic_ref_index, message_ordinal - ) + await add_action(action, semantic_refs, semantic_ref_index, message_ordinal) for inverse_action in knowledge.inverse_actions: - await add_action_to_index( + await add_action( inverse_action, semantic_refs, semantic_ref_index, message_ordinal ) for topic in knowledge.topics: - await add_topic_to_index( - topic, semantic_refs, semantic_ref_index, message_ordinal + await add_topic( + Topic(text=topic), semantic_refs, semantic_ref_index, message_ordinal ) @@ -566,17 +473,64 @@ async def add_metadata_to_index[TMessage: IMessage]( knowledge_response = msg.get_knowledge() for entity in knowledge_response.entities: if knowledge_validator is None or knowledge_validator("entity", entity): - await add_entity_to_index(entity, semantic_refs, semantic_ref_index, i) + await add_entity(entity, semantic_refs, semantic_ref_index, i) for action in knowledge_response.actions: if knowledge_validator is None or knowledge_validator("action", action): - await add_action_to_index(action, semantic_refs, semantic_ref_index, i) + await add_action(action, semantic_refs, semantic_ref_index, i) + for inverse_action in knowledge_response.inverse_actions: + if knowledge_validator is None or knowledge_validator( + 
"action", inverse_action + ): + await add_action(inverse_action, semantic_refs, semantic_ref_index, i) for topic_response in knowledge_response.topics: topic = Topic(text=topic_response) if knowledge_validator is None or knowledge_validator("topic", topic): - await add_topic_to_index(topic, semantic_refs, semantic_ref_index, i) + await add_topic(topic, semantic_refs, semantic_ref_index, i) i += 1 +def collect_facet_terms(facet: kplib.Facet | None) -> list[str]: + """Collect terms from a facet without touching any index.""" + if facet is None: + return [] + terms = [facet.name] + if facet.value is not None: + terms.append(str(facet.value)) + return terms + + +def collect_entity_terms(entity: kplib.ConcreteEntity) -> list[str]: + """Collect all terms an entity would add to the semantic ref index.""" + terms = [entity.name] + for t in entity.type: + terms.append(t) + if entity.facets: + for facet in entity.facets: + terms.extend(collect_facet_terms(facet)) + return terms + + +def collect_action_terms(action: kplib.Action) -> list[str]: + """Collect all terms an action would add to the semantic ref index.""" + terms = [" ".join(action.verbs)] + if action.subject_entity_name != "none": + terms.append(action.subject_entity_name) + if action.object_entity_name != "none": + terms.append(action.object_entity_name) + if action.indirect_object_entity_name != "none": + terms.append(action.indirect_object_entity_name) + if action.params: + for param in action.params: + if isinstance(param, str): + terms.append(param) + else: + terms.append(param.name) + if isinstance(param.value, str): + terms.append(param.value) + terms.extend(collect_facet_terms(action.subject_entity_facet)) + return terms + + async def add_metadata_to_index_from_list[TMessage: IMessage]( messages: list[TMessage], semantic_refs: ISemanticRefCollection, @@ -585,18 +539,63 @@ async def add_metadata_to_index_from_list[TMessage: IMessage]( knowledge_validator: KnowledgeValidator | None = None, ) -> None: 
"""Extract metadata knowledge from a list of messages starting at ordinal.""" + next_ordinal = await semantic_refs.size() + collected_refs: list[SemanticRef] = [] + collected_terms: list[tuple[str, SemanticRefOrdinal]] = [] + for i, msg in enumerate(messages, start_from_ordinal): knowledge_response = msg.get_knowledge() for entity in knowledge_response.entities: if knowledge_validator is None or knowledge_validator("entity", entity): - await add_entity_to_index(entity, semantic_refs, semantic_ref_index, i) + ref = SemanticRef( + semantic_ref_ordinal=next_ordinal, + range=text_range_from_message_chunk(i), + knowledge=entity, + ) + collected_refs.append(ref) + for term in collect_entity_terms(entity): + collected_terms.append((term, next_ordinal)) + next_ordinal += 1 for action in knowledge_response.actions: if knowledge_validator is None or knowledge_validator("action", action): - await add_action_to_index(action, semantic_refs, semantic_ref_index, i) + ref = SemanticRef( + semantic_ref_ordinal=next_ordinal, + range=text_range_from_message_chunk(i), + knowledge=action, + ) + collected_refs.append(ref) + for term in collect_action_terms(action): + collected_terms.append((term, next_ordinal)) + next_ordinal += 1 + for inverse_action in knowledge_response.inverse_actions: + if knowledge_validator is None or knowledge_validator( + "action", inverse_action + ): + ref = SemanticRef( + semantic_ref_ordinal=next_ordinal, + range=text_range_from_message_chunk(i), + knowledge=inverse_action, + ) + collected_refs.append(ref) + for term in collect_action_terms(inverse_action): + collected_terms.append((term, next_ordinal)) + next_ordinal += 1 for topic_response in knowledge_response.topics: topic = Topic(text=topic_response) if knowledge_validator is None or knowledge_validator("topic", topic): - await add_topic_to_index(topic, semantic_refs, semantic_ref_index, i) + ref = SemanticRef( + semantic_ref_ordinal=next_ordinal, + range=text_range_from_message_chunk(i), + 
knowledge=topic, + ) + collected_refs.append(ref) + collected_terms.append((topic.text, next_ordinal)) + next_ordinal += 1 + + if collected_refs: + await semantic_refs.extend(collected_refs) + if collected_terms: + await semantic_ref_index.add_terms_batch(collected_terms) class TermToSemanticRefIndex(ITermToSemanticRefIndex): @@ -635,6 +634,13 @@ async def add_term( self._map[term] = [semantic_ref_ordinal] return term + async def add_terms_batch( + self, + terms: Sequence[tuple[str, SemanticRefOrdinal | ScoredSemanticRefOrdinal]], + ) -> None: + for term, ordinal in terms: + await self.add_term(term, ordinal) + async def lookup_term(self, term: str) -> list[ScoredSemanticRefOrdinal] | None: return self._map.get(self._prepare_term(term)) or [] @@ -721,24 +727,35 @@ async def add_to_semantic_ref_index[ """Add semantic references to the conversation's semantic reference index.""" # Only create knowledge extractor if auto extraction is enabled knowledge_extractor = None if settings.auto_extract_knowledge: knowledge_extractor = ( settings.knowledge_extractor or convknowledge.KnowledgeExtractor() ) - # Process messages in batches for LLM knowledge extraction - batches = await get_message_chunk_batch( - conversation.messages, - message_ordinal_start_at, - settings.batch_size, - ) - for text_location_batch in batches: + # Build a flat list of all text locations + text_locations: list[TextLocation] = [] + message_ordinal = 0 + async for message in conversation.messages: + if message_ordinal < message_ordinal_start_at: + message_ordinal += 1 + continue + for chunk_ordinal in range(len(message.text_chunks)): + text_locations.append( + TextLocation( + message_ordinal=message_ordinal, + chunk_ordinal=chunk_ordinal, + ) + ) + message_ordinal += 1 + + if text_locations: await add_batch_to_semantic_ref_index( conversation, - text_location_batch, + text_locations, knowledge_extractor, terms_added, + concurrency=settings.concurrency, ) diff --git 
a/src/typeagent/storage/sqlite/collections.py b/src/typeagent/storage/sqlite/collections.py index e8a11827..fe394dcb 100644 --- a/src/typeagent/storage/sqlite/collections.py +++ b/src/typeagent/storage/sqlite/collections.py @@ -46,12 +46,10 @@ def __aiter__(self) -> typing.AsyncGenerator[TMessage, None]: async def _async_iterator(self) -> typing.AsyncGenerator[TMessage, None]: cursor = self.db.cursor() - cursor.execute( - """ + cursor.execute(""" SELECT chunks, chunk_uri, start_timestamp, tags, metadata, extra FROM Messages ORDER BY msg_id - """ - ) + """) for row in cursor: message = self._deserialize_message_from_row(row) yield message @@ -288,12 +286,10 @@ def _size(self) -> int: async def __aiter__(self) -> typing.AsyncGenerator[interfaces.SemanticRef, None]: cursor = self.db.cursor() - cursor.execute( - """ + cursor.execute(""" SELECT semref_id, range_json, knowledge_type, knowledge_json FROM SemanticRefs ORDER BY semref_id - """ - ) + """) for row in cursor: yield self._deserialize_semantic_ref_from_row(row) @@ -335,17 +331,59 @@ async def get_multiple(self, arg: list[int]) -> list[interfaces.SemanticRef]: if len(arg) < 2: return [await self.get_item(ordinal) for ordinal in arg] cursor = self.db.cursor() - cursor.execute( - f""" + cursor.execute(f""" SELECT semref_id, range_json, knowledge_type, knowledge_json FROM SemanticRefs WHERE semref_id IN {tuple(arg)} - """ - ) + """) rows = cursor.fetchall() rowdict = {row[0]: row for row in rows} assert set(rowdict) == set(arg) return [self._deserialize_semantic_ref_from_row(rowdict[ordl]) for ordl in arg] + async def get_metadata_multiple( + self, ordinals: list[int] + ) -> list[interfaces.SemanticRefMetadata]: + if not ordinals: + return [] + cursor = self.db.cursor() + placeholders = ",".join("?" 
* len(ordinals)) + cursor.execute( + f""" + SELECT semref_id, range_json, knowledge_type + FROM SemanticRefs WHERE semref_id IN ({placeholders}) + """, + ordinals, + ) + rows = cursor.fetchall() + rowdict = {r[0]: r for r in rows} + result = [] + for o in ordinals: + row = rowdict[o] + range_data = json.loads(row[1]) + start = range_data["start"] + end_data = range_data.get("end") + result.append( + interfaces.SemanticRefMetadata( + ordinal=row[0], + range=interfaces.TextRange( + start=interfaces.TextLocation( + start["messageOrdinal"], + start.get("chunkOrdinal", 0), + ), + end=( + interfaces.TextLocation( + end_data["messageOrdinal"], + end_data.get("chunkOrdinal", 0), + ) + if end_data + else None + ), + ), + knowledge_type=row[2], + ) + ) + return result + async def append(self, item: interfaces.SemanticRef) -> None: cursor = self.db.cursor() semref_id, range_json, knowledge_type, knowledge_json = ( diff --git a/src/typeagent/storage/sqlite/messageindex.py b/src/typeagent/storage/sqlite/messageindex.py index 8b7afd54..d48a9761 100644 --- a/src/typeagent/storage/sqlite/messageindex.py +++ b/src/typeagent/storage/sqlite/messageindex.py @@ -63,6 +63,9 @@ async def add_messages_starting_at( for chunk_ord, chunk in enumerate(message.text_chunks): chunks_to_embed.append((msg_ord, chunk_ord, chunk)) + if not chunks_to_embed: + return + embeddings = await self._vectorbase.get_embeddings( [chunk for _, _, chunk in chunks_to_embed], cache=False ) @@ -258,7 +261,7 @@ async def generate_embedding(self, text: str) -> NormalizedEmbedding: """Generate an embedding for the given text.""" return await self._vectorbase.get_embedding(text) - def lookup_by_embedding( + async def lookup_by_embedding( self, text_embedding: NormalizedEmbedding, max_matches: int | None = None, @@ -274,16 +277,16 @@ def lookup_by_embedding( ) return self._scored_locations_to_message_ordinals(scored_locations, max_matches) - def lookup_in_subset_by_embedding( + async def lookup_in_subset_by_embedding( 
self, text_embedding: NormalizedEmbedding, ordinals_to_search: list[interfaces.MessageOrdinal], max_matches: int | None = None, threshold_score: float | None = None, ) -> list[interfaces.ScoredMessageOrdinal]: - """Look up messages in a subset by embedding (synchronous version).""" + """Look up messages in a subset by embedding.""" ordinals_set = set(ordinals_to_search) - return self.lookup_by_embedding( + return await self.lookup_by_embedding( text_embedding, max_matches, threshold_score, @@ -299,13 +302,11 @@ async def serialize(self) -> interfaces.MessageTextIndexData: """Serialize the message text index.""" # Get all data from the MessageTextIndex table cursor = self.db.cursor() - cursor.execute( - """ + cursor.execute(""" SELECT msg_id, chunk_ordinal, embedding FROM MessageTextIndex ORDER BY msg_id, chunk_ordinal - """ - ) + """) # Build the text locations and embeddings text_locations = [] diff --git a/src/typeagent/storage/sqlite/propindex.py b/src/typeagent/storage/sqlite/propindex.py index 5a0fa63a..59a5a111 100644 --- a/src/typeagent/storage/sqlite/propindex.py +++ b/src/typeagent/storage/sqlite/propindex.py @@ -3,10 +3,15 @@ """SQLite-based property index implementation.""" +from collections.abc import Sequence import sqlite3 from ...knowpro import interfaces from ...knowpro.interfaces import ScoredSemanticRefOrdinal +from ...storage.memory.propindex import ( + make_property_term_text, + split_property_term_text, +) class SqlitePropertyIndex(interfaces.IPropertyToSemanticRefIndex): @@ -46,11 +51,6 @@ async def add_property( score = 1.0 # Normalize property name and value (to match in-memory implementation) - from ...storage.memory.propindex import ( - make_property_term_text, - split_property_term_text, - ) - term_text = make_property_term_text(property_name, value) term_text = term_text.lower() # Matches PropertyIndex._prepare_term_text property_name, value = split_property_term_text(term_text) @@ -67,6 +67,38 @@ async def add_property( (property_name, 
value, score, semref_id), ) + async def add_properties_batch( + self, + properties: Sequence[ + tuple[ + str, + str, + interfaces.SemanticRefOrdinal | interfaces.ScoredSemanticRefOrdinal, + ] + ], + ) -> None: + if not properties: + return + rows = [] + for property_name, value, ordinal in properties: + if isinstance(ordinal, interfaces.ScoredSemanticRefOrdinal): + semref_id = ordinal.semantic_ref_ordinal + score = ordinal.score + else: + semref_id = ordinal + score = 1.0 + term_text = make_property_term_text(property_name, value) + term_text = term_text.lower() + property_name, value = split_property_term_text(term_text) + if property_name.startswith("prop."): + property_name = property_name[5:] + rows.append((property_name, value, score, semref_id)) + cursor = self.db.cursor() + cursor.executemany( + "INSERT INTO PropertyIndex (prop_name, value_str, score, semref_id) VALUES (?, ?, ?, ?)", + rows, + ) + async def clear(self) -> None: cursor = self.db.cursor() cursor.execute("DELETE FROM PropertyIndex") @@ -77,11 +109,6 @@ async def lookup_property( value: str, ) -> list[interfaces.ScoredSemanticRefOrdinal] | None: # Normalize property name and value (to match in-memory implementation) - from ...storage.memory.propindex import ( - make_property_term_text, - split_property_term_text, - ) - term_text = make_property_term_text(property_name, value) term_text = term_text.lower() # Matches PropertyIndex._prepare_term_text property_name, value = split_property_term_text(term_text) diff --git a/src/typeagent/storage/sqlite/provider.py b/src/typeagent/storage/sqlite/provider.py index b3a63a9c..2978c8ed 100644 --- a/src/typeagent/storage/sqlite/provider.py +++ b/src/typeagent/storage/sqlite/provider.py @@ -6,11 +6,12 @@ from datetime import datetime, timezone import sqlite3 -from ...aitools.embeddings import AsyncEmbeddingModel +from ...aitools.model_adapters import create_embedding_model from ...aitools.vectorbase import TextEmbeddingIndexSettings from ...knowpro import 
interfaces from ...knowpro.convsettings import MessageTextIndexSettings, RelatedTermIndexSettings from ...knowpro.interfaces import ConversationMetadata, STATUS_INGESTED +from ...knowpro.interfaces_storage import ChunkFailure from .collections import SqliteMessageCollection, SqliteSemanticRefCollection from .messageindex import SqliteMessageTextIndex from .propindex import SqlitePropertyIndex @@ -31,7 +32,7 @@ class SqliteStorageProvider[TMessage: interfaces.IMessage]( """SQLite-backed storage provider implementation. This provider performs consistency checks on database initialization to ensure - that existing embeddings match the configured embedding_size. If a mismatch is + that existing embeddings match the configured embedding model. If a mismatch is detected, a ValueError is raised with a descriptive error message. """ @@ -52,8 +53,9 @@ def __init__( provided_message_settings = message_text_index_settings provided_related_settings = related_term_index_settings - # Initialize database connection - self.db = sqlite3.connect(db_path) + # Initialize database connection with autocommit mode + # isolation_level=None enables manual transaction control via BEGIN/COMMIT + self.db = sqlite3.connect(db_path, isolation_level=None) # Configure SQLite for optimal bulk insertion performance # TODO: Move into init_db_schema() @@ -118,19 +120,16 @@ def _resolve_embedding_settings( provided_related_settings: RelatedTermIndexSettings | None, ) -> tuple[MessageTextIndexSettings, RelatedTermIndexSettings]: metadata_exists = self._conversation_metadata_exists() - stored_size_str = self._get_single_metadata_value("embedding_size") stored_name = self._get_single_metadata_value("embedding_name") - stored_size = int(stored_size_str) if stored_size_str else None if provided_message_settings is None: - if stored_size is not None or stored_name is not None: - embedding_model = AsyncEmbeddingModel( - embedding_size=stored_size, - model_name=stored_name, - ) + if stored_name is not None: + 
spec = stored_name + if spec and ":" not in spec: + spec = f"openai:{spec}" + embedding_model = create_embedding_model(spec) base_embedding_settings = TextEmbeddingIndexSettings( embedding_model=embedding_model, - embedding_size=stored_size, ) else: base_embedding_settings = TextEmbeddingIndexSettings() @@ -138,13 +137,7 @@ def _resolve_embedding_settings( else: message_settings = provided_message_settings base_embedding_settings = message_settings.embedding_index_settings - provided_size = base_embedding_settings.embedding_size provided_name = base_embedding_settings.embedding_model.model_name - if stored_size is not None and stored_size != provided_size: - raise ValueError( - f"Conversation metadata embedding_size " - f"({stored_size}) does not match provided embedding size ({provided_size})." - ) if stored_name is not None and stored_name != provided_name: raise ValueError( f"Conversation metadata embedding_model " @@ -156,12 +149,7 @@ def _resolve_embedding_settings( else: related_settings = provided_related_settings related_embedding_settings = related_settings.embedding_index_settings - related_size = related_embedding_settings.embedding_size related_name = related_embedding_settings.embedding_model.model_name - if related_size != base_embedding_settings.embedding_size: - raise ValueError( - "Related term index embedding_size does not match message text index embedding_size" - ) if related_name != base_embedding_settings.embedding_model.model_name: raise ValueError( "Related term index embedding_model does not match message text index embedding_model" @@ -169,17 +157,9 @@ def _resolve_embedding_settings( if related_settings.embedding_index_settings is not base_embedding_settings: related_settings.embedding_index_settings = base_embedding_settings - actual_size = base_embedding_settings.embedding_size actual_name = base_embedding_settings.embedding_model.model_name if self._metadata is not None: - if self._metadata.embedding_size is None: - 
self._metadata.embedding_size = actual_size - elif self._metadata.embedding_size != actual_size: - raise ValueError( - "Conversation metadata embedding_size does not match provider settings" - ) - if self._metadata.embedding_model is None: self._metadata.embedding_model = actual_name elif self._metadata.embedding_model != actual_name: @@ -189,8 +169,6 @@ def _resolve_embedding_settings( if metadata_exists: metadata_updates: dict[str, str] = {} - if stored_size is None: - metadata_updates["embedding_size"] = str(actual_size) if stored_name is None: metadata_updates["embedding_name"] = actual_name if metadata_updates: @@ -199,51 +177,47 @@ def _resolve_embedding_settings( return message_settings, related_settings def _check_embedding_consistency(self) -> None: - """Check that existing embeddings in the database match the expected embedding size. + """Check that existing embeddings in the database are consistent. - This method is called during initialization to ensure that embeddings stored in the - database match the embedding_size specified in ConversationSettings. This prevents - runtime errors when trying to use embeddings of incompatible sizes. + This method is called during initialization to ensure that embeddings + stored in the message text index and related terms index have the same + size. This prevents runtime errors when trying to use embeddings of + incompatible sizes. Raises: - ValueError: If embeddings in the database don't match the expected size. + ValueError: If embeddings in the database have inconsistent sizes. 
""" from .schema import deserialize_embedding cursor = self.db.cursor() - expected_size = ( - self.message_text_index_settings.embedding_index_settings.embedding_size - ) - # Check message text index embeddings + # Get size from message text index embeddings + message_size: int | None = None cursor.execute("SELECT embedding FROM MessageTextIndex LIMIT 1") row = cursor.fetchone() if row and row[0]: embedding = deserialize_embedding(row[0]) - actual_size = len(embedding) - if actual_size != expected_size: - raise ValueError( - f"Message text index embedding size mismatch: " - f"database contains embeddings of size {actual_size}, " - f"but ConversationSettings specifies embedding_size={expected_size}. " - f"The database was likely created with a different embedding model. " - f"Please use the same embedding model or create a new database." - ) + message_size = len(embedding) - # Check related terms fuzzy index embeddings + # Get size from related terms fuzzy index embeddings + related_size: int | None = None cursor.execute("SELECT term_embedding FROM RelatedTermsFuzzy LIMIT 1") row = cursor.fetchone() if row and row[0]: embedding = deserialize_embedding(row[0]) - actual_size = len(embedding) - if actual_size != expected_size: - raise ValueError( - f"Related terms index embedding size mismatch: " - f"database contains embeddings of size {actual_size}, " - f"but ConversationSettings specifies embedding_size={expected_size}. " - f"The database was likely created with a different embedding model. " - f"Please use the same embedding model or create a new database." - ) + related_size = len(embedding) + + if ( + message_size is not None + and related_size is not None + and message_size != related_size + ): + raise ValueError( + f"Embedding size mismatch: " + f"message text index has size {message_size}, " + f"but related terms index has size {related_size}. " + f"The database may be corrupted." 
+ ) def _init_conversation_metadata_if_needed(self) -> None: """Initialize conversation metadata if the database is new (empty metadata table). @@ -272,18 +246,10 @@ def _init_conversation_metadata_if_needed(self) -> None: tags = None extras = {} - actual_embedding_size = ( - self.message_text_index_settings.embedding_index_settings.embedding_size - ) actual_embedding_name = ( self.message_text_index_settings.embedding_index_settings.embedding_model.model_name ) - metadata_embedding_size = ( - self._metadata.embedding_size - if self._metadata and self._metadata.embedding_size is not None - else actual_embedding_size - ) metadata_embedding_name = ( self._metadata.embedding_model if self._metadata and self._metadata.embedding_model is not None @@ -305,13 +271,19 @@ def _init_conversation_metadata_if_needed(self) -> None: created_at=format_timestamp_utc(current_time), updated_at=format_timestamp_utc(current_time), tag=tags, # None or list of tags - embedding_size=str(metadata_embedding_size), embedding_name=metadata_embedding_name, **extras, ) async def __aenter__(self) -> "SqliteStorageProvider[TMessage]": """Enter transaction context.""" + if self.db.in_transaction: + raise RuntimeError( + "Cannot start a new transaction: a transaction is already in progress. " + "This may happen if: (1) you're nesting 'async with storage:' blocks, " + "(2) a previous transaction was not properly committed/rolled back, or " + "(3) the database file was left in an inconsistent state from a crash." 
+ ) self.db.execute("BEGIN IMMEDIATE") # Initialize metadata on first write transaction self._init_conversation_metadata_if_needed() @@ -452,7 +424,7 @@ async def deserialize(self, data: dict) -> None: if data.get("messageIndexData"): await self._message_text_index.deserialize(data["messageIndexData"]) - def get_conversation_metadata(self) -> ConversationMetadata: + async def get_conversation_metadata(self) -> ConversationMetadata: """Get conversation metadata.""" cursor = self.db.cursor() @@ -505,9 +477,6 @@ def parse_datetime(value_str: str) -> datetime: updated_at_str = get_single("updated_at") updated_at = parse_datetime(updated_at_str) if updated_at_str else None - embedding_size_str = get_single("embedding_size") - embedding_size = int(embedding_size_str) if embedding_size_str else None - embedding_model = get_single("embedding_name") # Handle tags (multiple values allowed, None if key doesn't exist) @@ -534,13 +503,12 @@ def parse_datetime(value_str: str) -> datetime: schema_version=schema_version, created_at=created_at, updated_at=updated_at, - embedding_size=embedding_size, embedding_model=embedding_model, tags=tags, extra=extra if extra else None, ) - def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: + async def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: """Set or update conversation metadata key-value pairs. 
Args: @@ -560,7 +528,7 @@ def set_conversation_metadata(self, **kwds: str | list[str] | None) -> None: """ _set_conversation_metadata(self.db, **kwds) - def update_conversation_timestamps( + async def update_conversation_timestamps( self, created_at: datetime | None = None, updated_at: datetime | None = None, @@ -581,9 +549,6 @@ def update_conversation_timestamps( # Insert default values if no metadata exists name_tag = self._metadata.name_tag if self._metadata else "conversation" schema_version = str(CONVERSATION_SCHEMA_VERSION) - actual_embedding_size = ( - self.message_text_index_settings.embedding_index_settings.embedding_size - ) actual_embedding_name = ( self.message_text_index_settings.embedding_index_settings.embedding_model.model_name ) @@ -591,7 +556,6 @@ def update_conversation_timestamps( metadata_kwds: dict[str, str | None] = { "name_tag": name_tag or "conversation", "schema_version": schema_version, - "embedding_size": str(actual_embedding_size), "embedding_name": actual_embedding_name, } if created_at is not None: @@ -613,7 +577,7 @@ def get_db_version(self) -> int: """Get the database schema version.""" return get_db_schema_version(self.db) - def is_source_ingested(self, source_id: str) -> bool: + async def is_source_ingested(self, source_id: str) -> bool: """Check if a source has already been ingested. This is a read-only operation that can be called outside of a transaction. @@ -631,7 +595,7 @@ def is_source_ingested(self, source_id: str) -> bool: row = cursor.fetchone() return row is not None and row[0] == STATUS_INGESTED - def get_source_status(self, source_id: str) -> str | None: + async def get_source_status(self, source_id: str) -> str | None: """Get the ingestion status of a source. 
Args: @@ -647,7 +611,7 @@ def get_source_status(self, source_id: str) -> str | None: row = cursor.fetchone() return row[0] if row else None - def mark_source_ingested( + async def mark_source_ingested( self, source_id: str, status: str = STATUS_INGESTED ) -> None: """Mark a source as ingested. @@ -664,3 +628,56 @@ def mark_source_ingested( "INSERT OR REPLACE INTO IngestedSources (source_id, status) VALUES (?, ?)", (source_id, status), ) + + async def record_chunk_failure( + self, + message_ordinal: int, + chunk_ordinal: int, + error_class: str, + error_message: str, + ) -> None: + """Record a knowledge-extraction failure for a single chunk. + + Idempotent: re-recording overwrites any prior entry for the same + (message_ordinal, chunk_ordinal). No commit; call within a transaction + context. + """ + failed_at = datetime.now(timezone.utc).isoformat() + cursor = self.db.cursor() + cursor.execute( + """ + INSERT OR REPLACE INTO ChunkFailures + (msg_id, chunk_ordinal, error_class, error_message, failed_at) + VALUES (?, ?, ?, ?, ?) + """, + (message_ordinal, chunk_ordinal, error_class, error_message, failed_at), + ) + + async def clear_chunk_failure( + self, message_ordinal: int, chunk_ordinal: int + ) -> None: + """Remove a previously recorded chunk failure (no-op if absent).""" + cursor = self.db.cursor() + cursor.execute( + "DELETE FROM ChunkFailures WHERE msg_id = ? 
AND chunk_ordinal = ?", + (message_ordinal, chunk_ordinal), + ) + + async def get_chunk_failures(self) -> list[ChunkFailure]: + """Return all recorded chunk failures, ordered by (msg_id, chunk_ordinal).""" + cursor = self.db.cursor() + cursor.execute(""" + SELECT msg_id, chunk_ordinal, error_class, error_message, failed_at + FROM ChunkFailures + ORDER BY msg_id, chunk_ordinal + """) + return [ + ChunkFailure( + message_ordinal=row[0], + chunk_ordinal=row[1], + error_class=row[2], + error_message=row[3], + failed_at=datetime.fromisoformat(row[4]), + ) + for row in cursor.fetchall() + ] diff --git a/src/typeagent/storage/sqlite/schema.py b/src/typeagent/storage/sqlite/schema.py index db6933db..99117c24 100644 --- a/src/typeagent/storage/sqlite/schema.py +++ b/src/typeagent/storage/sqlite/schema.py @@ -148,6 +148,28 @@ ); """ +# Table for tracking knowledge-extraction failures at the chunk level. +# Each row records a (message_ordinal, chunk_ordinal) pair whose extraction +# failed (typically because the LLM returned malformed JSON or an invalid +# schema). The message text itself is still stored in the Messages table; only +# the *enrichment* of that chunk is missing. A future "re-extract" tool can +# read this table to retry just the failed chunks. 
+CHUNK_FAILURES_SCHEMA = """ +CREATE TABLE IF NOT EXISTS ChunkFailures ( + msg_id INTEGER NOT NULL, -- Message ordinal (matches Messages.msg_id) + chunk_ordinal INTEGER NOT NULL, -- 0-based index into the message's text_chunks + error_class TEXT NOT NULL, -- Fully-qualified class name of the failure + error_message TEXT NOT NULL, -- Human-readable failure description + failed_at TEXT NOT NULL, -- ISO-8601 UTC timestamp of the failure + + PRIMARY KEY (msg_id, chunk_ordinal) +); +""" + +CHUNK_FAILURES_MSG_INDEX = """ +CREATE INDEX IF NOT EXISTS idx_chunk_failures_msg ON ChunkFailures(msg_id); +""" + # Type aliases for database row tuples type ShreddedMessage = tuple[ str | None, str | None, str | None, str | None, str | None, str | None @@ -271,6 +293,7 @@ def init_db_schema(db: sqlite3.Connection) -> None: cursor.execute(RELATED_TERMS_FUZZY_SCHEMA) cursor.execute(TIMESTAMP_INDEX_SCHEMA) cursor.execute(INGESTED_SOURCES_SCHEMA) + cursor.execute(CHUNK_FAILURES_SCHEMA) # Create additional indexes cursor.execute(SEMANTIC_REF_INDEX_TERM_INDEX) @@ -279,6 +302,7 @@ def init_db_schema(db: sqlite3.Connection) -> None: cursor.execute(RELATED_TERMS_ALIASES_TERM_INDEX) cursor.execute(RELATED_TERMS_ALIASES_ALIAS_INDEX) cursor.execute(RELATED_TERMS_FUZZY_TERM_INDEX) + cursor.execute(CHUNK_FAILURES_MSG_INDEX) def get_db_schema_version(db: sqlite3.Connection) -> int: diff --git a/src/typeagent/storage/sqlite/semrefindex.py b/src/typeagent/storage/sqlite/semrefindex.py index 682b8e7d..ac68a1e0 100644 --- a/src/typeagent/storage/sqlite/semrefindex.py +++ b/src/typeagent/storage/sqlite/semrefindex.py @@ -3,6 +3,7 @@ """SQLite-based semantic reference index implementation.""" +from collections.abc import Sequence import re import sqlite3 import unicodedata @@ -56,6 +57,33 @@ async def add_term( return term + async def add_terms_batch( + self, + terms: Sequence[ + tuple[ + str, interfaces.SemanticRefOrdinal | interfaces.ScoredSemanticRefOrdinal + ] + ], + ) -> None: + if not terms: + 
return + rows = [] + for term, ordinal in terms: + if not term: + continue + term = self._prepare_term(term) + if isinstance(ordinal, interfaces.ScoredSemanticRefOrdinal): + semref_id = ordinal.semantic_ref_ordinal + else: + semref_id = ordinal + rows.append((term, semref_id)) + if rows: + cursor = self.db.cursor() + cursor.executemany( + "INSERT OR IGNORE INTO SemanticRefIndex (term, semref_id) VALUES (?, ?)", + rows, + ) + async def remove_term( self, term: str, semantic_ref_ordinal: interfaces.SemanticRefOrdinal ) -> None: diff --git a/src/typeagent/transcripts/README.md b/src/typeagent/transcripts/README.md index 679ea0de..a2a52be0 100644 --- a/src/typeagent/transcripts/README.md +++ b/src/typeagent/transcripts/README.md @@ -23,10 +23,10 @@ various types of transcripts. ```python from typeagent.transcripts.transcript_ingest import ingest_vtt_transcript from typeagent.knowpro.convsettings import ConversationSettings -from typeagent.aitools import utils +from dotenv import load_dotenv # Load environment variables for API keys from .env file -utils.load_dotenv() +load_dotenv() # Create settings (tweak as needed) settings = ConversationSettings() diff --git a/src/typeagent/transcripts/transcript.py b/src/typeagent/transcripts/transcript.py index 494166ba..5033e293 100644 --- a/src/typeagent/transcripts/transcript.py +++ b/src/typeagent/transcripts/transcript.py @@ -143,13 +143,19 @@ async def deserialize( @staticmethod def _read_conversation_data_from_file( - filename_prefix: str, embedding_size: int + filename_prefix: str, ) -> ConversationDataWithIndexes[Any]: """Read transcript conversation data from files. 
No exceptions are caught; they just bubble out.""" with open(filename_prefix + "_data.json", "r", encoding="utf-8") as f: json_data: serialization.ConversationJsonData[TranscriptMessageData] = ( json.load(f) ) + embedding_file_header = json_data.get("embeddingFileHeader") + embedding_size = 0 + if embedding_file_header: + model_metadata = embedding_file_header.get("modelMetadata") + if model_metadata: + embedding_size = model_metadata.get("embeddingSize", 0) embeddings_list: list[NormalizedEmbeddings] | None = None if embedding_size: with open(filename_prefix + "_embeddings.bin", "rb") as f: @@ -159,7 +165,7 @@ def _read_conversation_data_from_file( embeddings_list = [embeddings] else: print( - "Warning: not reading embeddings file because size is {embedding_size}" + f"Warning: not reading embeddings file because size is {embedding_size}" ) embeddings_list = None file_data = serialization.ConversationFileData( @@ -178,10 +184,7 @@ async def read_from_file( settings: ConversationSettings, dbname: str | None = None, ) -> "Transcript": - embedding_size = settings.embedding_model.embedding_size - data = Transcript._read_conversation_data_from_file( - filename_prefix, embedding_size - ) + data = Transcript._read_conversation_data_from_file(filename_prefix) provider = await settings.get_storage_provider() msgs = await provider.get_message_collection() diff --git a/tests/conftest.py b/tests/conftest.py index 3533e23d..7f0f11f5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,21 +1,18 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. 
-from collections.abc import AsyncGenerator, Callable, Iterator +from collections.abc import AsyncGenerator, Callable, Iterator, Sequence import os from pathlib import Path import tempfile from typing import Any +from dotenv import load_dotenv import pytest import pytest_asyncio -from openai.types.create_embedding_response import CreateEmbeddingResponse, Usage -from openai.types.embedding import Embedding -import tiktoken - -from typeagent.aitools import utils -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.embeddings import IEmbeddingModel +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import ( ConversationSettings, @@ -35,7 +32,7 @@ SemanticRef, TextLocation, ) -from typeagent.knowpro.kplib import KnowledgeResponse +from typeagent.knowpro.knowledge_schema import KnowledgeResponse from typeagent.knowpro.secindex import ConversationSecondaryIndexes from typeagent.storage import SqliteStorageProvider from typeagent.storage.memory import MemoryStorageProvider @@ -78,21 +75,21 @@ def has_testdata_file(filename: str) -> bool: @pytest.fixture(scope="session") def needs_auth() -> None: - utils.load_dotenv() + load_dotenv() @pytest.fixture(scope="session") def really_needs_auth() -> None: - utils.load_dotenv() + load_dotenv() # Check if any of the supported API keys is set if not (os.getenv("OPENAI_API_KEY") or os.getenv("AZURE_OPENAI_API_KEY")): pytest.skip("No API key found") @pytest.fixture(scope="session") -def embedding_model() -> AsyncEmbeddingModel: +def embedding_model() -> IEmbeddingModel: """Fixture to create a test embedding model with small embedding size for faster tests.""" - return AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + return create_test_embedding_model() @pytest.fixture(scope="session") @@ -130,7 +127,7 @@ def temp_db_path() -> Iterator[str]: @pytest.fixture def 
memory_storage( - embedding_model: AsyncEmbeddingModel, + embedding_model: IEmbeddingModel, ) -> MemoryStorageProvider: """Create a memory storage provider with settings.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model=embedding_model) @@ -188,7 +185,7 @@ def get_text_location(self) -> TextLocation: @pytest_asyncio.fixture async def sqlite_storage( - temp_db_path: str, embedding_model: AsyncEmbeddingModel + temp_db_path: str, embedding_model: IEmbeddingModel ) -> AsyncGenerator[SqliteStorageProvider[FakeMessage], None]: """Create a SqliteStorageProvider for testing.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -239,6 +236,13 @@ async def add_term( self.term_to_refs[term].append(scored_ref) return term + async def add_terms_batch( + self, + terms: Sequence[tuple[str, int | ScoredSemanticRefOrdinal]], + ) -> None: + for term, ordinal in terms: + await self.add_term(term, ordinal) + async def remove_term(self, term: str, semantic_ref_ordinal: int) -> None: if term in self.term_to_refs: self.term_to_refs[term] = [ @@ -299,7 +303,7 @@ def __init__( self._has_secondary_indexes = has_secondary_indexes else: # Create test model for settings - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() self.settings = ConversationSettings(test_model, storage_provider) self._needs_async_init = False self._storage_provider = storage_provider @@ -319,7 +323,7 @@ def __init__( async def ensure_initialized(self): """Ensure async initialization is complete.""" if self._needs_async_init: - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() self.settings = ConversationSettings(test_model) storage_provider = await self.settings.get_storage_provider() self._storage_provider = storage_provider @@ -351,79 +355,3 @@ async def fake_conversation_with_storage( ) -> FakeConversation: """Fixture to create a FakeConversation instance with storage 
provider.""" return FakeConversation(storage_provider=memory_storage) - - -class FakeEmbeddings: - - def __init__( - self, - max_batch_size: int = 2048, - max_chunk_size: int = 4096, - max_elements_per_batch: int = 300_000, - use_tiktoken: bool = False, - ): - self.model_name = "text-embedding-ada-002" - self.call_count = 0 - self.max_batch_size = max_batch_size - self.max_chunk_size = max_chunk_size - self.max_elements_per_batch = max_elements_per_batch - self.use_tiktoken = use_tiktoken - - def reset_counter(self): - self.call_count = 0 - - async def create(self, **kwargs): - self.call_count += 1 - input = kwargs["input"] - len_input = len(input) - if len_input > self.max_batch_size: - raise ValueError("Embedding model received batch larger 2048") - dimensions = 1536 - if "dimensions" in kwargs: - dimensions = kwargs["dimensions"] - - embedding_result = [] - total_elements = 0 - for index in range(len_input): - entity = input[index] - if self.use_tiktoken: - enc_name = tiktoken.encoding_name_for_model(self.model_name) - enc = tiktoken.get_encoding(enc_name) - entity = enc.encode(entity) - total_elements += len(entity) - if len(entity) > self.max_chunk_size: - raise ValueError( - f"Chunk size {len(entity)} larger than max size {self.max_chunk_size}" - ) - value = index % 2 - embedding_result.append( - Embedding( - embedding=[value] * dimensions, index=index, object="embedding" - ) - ) - - if total_elements > self.max_elements_per_batch: - raise ValueError( - f"Batch size {total_elements} larger than max tokens/chars per batch {self.max_elements_per_batch}" - ) - - response = CreateEmbeddingResponse( - data=embedding_result, - model="test_model", - object="list", - usage=Usage(prompt_tokens=0, total_tokens=0), - ) - - return response - - -@pytest.fixture -def fake_embeddings() -> FakeEmbeddings: - """Fixture to create a FaceEmbedding instance""" - return FakeEmbeddings(max_batch_size=2048, max_chunk_size=4096 * 3) - - -@pytest.fixture -def 
fake_embeddings_tiktoken() -> FakeEmbeddings: - """Fixture to create a FaceEmbedding instance""" - return FakeEmbeddings(max_batch_size=2048, max_chunk_size=4096, use_tiktoken=True) diff --git a/tests/test_add_messages_streaming.py b/tests/test_add_messages_streaming.py new file mode 100644 index 00000000..bdc25ede --- /dev/null +++ b/tests/test_add_messages_streaming.py @@ -0,0 +1,293 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Tests for add_messages_streaming.""" + +from collections.abc import AsyncIterator +import os +import tempfile + +import pytest + +import typechat + +from typeagent.aitools.model_adapters import create_test_embedding_model +from typeagent.knowpro import knowledge_schema as kplib +from typeagent.knowpro.convsettings import ConversationSettings +from typeagent.knowpro.interfaces_core import IKnowledgeExtractor +from typeagent.storage.sqlite.provider import SqliteStorageProvider +from typeagent.transcripts.transcript import ( + Transcript, + TranscriptMessage, + TranscriptMessageMeta, +) + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_message( + text: str, + speaker: str = "Alice", + source_id: str | None = None, +) -> TranscriptMessage: + return TranscriptMessage( + text_chunks=[text], + metadata=TranscriptMessageMeta(speaker=speaker), + tags=["test"], + source_id=source_id, + ) + + +async def _create_transcript( + db_path: str, + *, + auto_extract: bool = False, + knowledge_extractor: IKnowledgeExtractor | None = None, +) -> tuple[Transcript, SqliteStorageProvider]: + model = create_test_embedding_model() + settings = ConversationSettings(model=model) + settings.semantic_ref_index_settings.auto_extract_knowledge = auto_extract + if knowledge_extractor is not None: + settings.semantic_ref_index_settings.knowledge_extractor = knowledge_extractor + storage = 
SqliteStorageProvider( + db_path, + message_type=TranscriptMessage, + message_text_index_settings=settings.message_text_index_settings, + related_term_index_settings=settings.related_term_index_settings, + ) + settings.storage_provider = storage + transcript = await Transcript.create(settings, name="test") + return transcript, storage + + +async def _async_iter( + items: list[TranscriptMessage], +) -> AsyncIterator[TranscriptMessage]: + for item in items: + yield item + + +def _ingested_count(storage: SqliteStorageProvider) -> int: + cursor = storage.db.cursor() + cursor.execute("SELECT COUNT(*) FROM IngestedSources") + return cursor.fetchone()[0] + + +def _failure_count(storage: SqliteStorageProvider) -> int: + cursor = storage.db.cursor() + cursor.execute("SELECT COUNT(*) FROM ChunkFailures") + return cursor.fetchone()[0] + + +# --------------------------------------------------------------------------- +# A test IKnowledgeExtractor that lets us control per-call results +# --------------------------------------------------------------------------- + +_EMPTY_RESPONSE = kplib.KnowledgeResponse( + entities=[], actions=[], inverse_actions=[], topics=[] +) + + +class ControlledExtractor: + """An IKnowledgeExtractor that returns Success or Failure per call. + + ``fail_on`` is a set of 0-based call indices for which the extractor + returns a Failure instead of a Success. + ``raise_on`` is a set of call indices that raise an exception. 
+ """ + + def __init__( + self, + *, + fail_on: set[int] | None = None, + raise_on: set[int] | None = None, + ) -> None: + self.fail_on = fail_on or set() + self.raise_on = raise_on or set() + self.call_count = 0 + + async def extract(self, message: str) -> typechat.Result[kplib.KnowledgeResponse]: + idx = self.call_count + self.call_count += 1 + if idx in self.raise_on: + raise RuntimeError(f"Systemic failure at call {idx}") + if idx in self.fail_on: + return typechat.Failure(f"Extraction failed for call {idx}") + return typechat.Success(_EMPTY_RESPONSE) + + +# --------------------------------------------------------------------------- +# Tests +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_streaming_basic() -> None: + """Streaming ingest of a few messages with no extraction.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [_make_message(f"msg-{i}") for i in range(5)] + result = await transcript.add_messages_streaming(_async_iter(msgs)) + + assert result.messages_added == 5 + assert await transcript.messages.size() == 5 + + await storage.close() + + +@pytest.mark.asyncio +async def test_streaming_batching() -> None: + """Messages are committed in batches of the requested size.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [_make_message(f"msg-{i}", source_id=f"s-{i}") for i in range(7)] + result = await transcript.add_messages_streaming( + _async_iter(msgs), batch_size=3 + ) + + # 3 batches: [0,1,2], [3,4,5], [6] + assert result.messages_added == 7 + assert await transcript.messages.size() == 7 + # All 7 sources marked + assert _ingested_count(storage) == 7 + + await storage.close() + + +@pytest.mark.asyncio +async def test_streaming_skips_already_ingested() -> 
None: + """Messages whose source_id is already ingested are skipped.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + # Pre-mark some sources as ingested + async with storage: + await storage.mark_source_ingested("s-1") + await storage.mark_source_ingested("s-3") + + msgs = [_make_message(f"msg-{i}", source_id=f"s-{i}") for i in range(5)] + result = await transcript.add_messages_streaming(_async_iter(msgs)) + + # s-1 and s-3 skipped -> only 3 added + assert result.messages_added == 3 + assert await transcript.messages.size() == 3 + assert _ingested_count(storage) == 5 # 2 pre-existing + 3 new + + await storage.close() + + +@pytest.mark.asyncio +async def test_streaming_no_source_id_always_ingested() -> None: + """Messages without source_id are always ingested (never skipped).""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [_make_message(f"msg-{i}") for i in range(3)] + result = await transcript.add_messages_streaming(_async_iter(msgs)) + + assert result.messages_added == 3 + assert _ingested_count(storage) == 0 # no source IDs to track + + await storage.close() + + +@pytest.mark.asyncio +async def test_streaming_records_chunk_failures() -> None: + """Extraction Failure results are recorded, not raised.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + extractor = ControlledExtractor(fail_on={1}) # second chunk fails + transcript, storage = await _create_transcript( + db_path, auto_extract=True, knowledge_extractor=extractor + ) + + msgs = [ + _make_message("good chunk 0"), + _make_message("bad chunk 1"), + _make_message("good chunk 2"), + ] + result = await transcript.add_messages_streaming(_async_iter(msgs)) + + assert result.messages_added == 3 + assert _failure_count(storage) == 1 + + failures = 
await storage.get_chunk_failures() + assert len(failures) == 1 + assert failures[0].message_ordinal == 1 + assert failures[0].chunk_ordinal == 0 + assert "Extraction failed" in failures[0].error_message + + await storage.close() + + +@pytest.mark.asyncio +async def test_streaming_exception_stops_run() -> None: + """A raised exception stops processing; committed batches survive.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + # Raise on the 4th extract call (first chunk of second batch) + extractor = ControlledExtractor(raise_on={3}) + transcript, storage = await _create_transcript( + db_path, auto_extract=True, knowledge_extractor=extractor + ) + + msgs = [_make_message(f"msg-{i}", source_id=f"s-{i}") for i in range(6)] + + with pytest.raises(ExceptionGroup) as exc_info: + await transcript.add_messages_streaming(_async_iter(msgs), batch_size=3) + + # Verify the wrapped exception is our RuntimeError + assert any( + isinstance(e, RuntimeError) and "Systemic failure" in str(e) + for e in exc_info.value.exceptions + ) + + # First batch (3 messages, 3 extract calls 0-2) committed + assert await transcript.messages.size() == 3 + assert _ingested_count(storage) == 3 + + await storage.close() + + +@pytest.mark.asyncio +async def test_streaming_empty_iterable() -> None: + """Streaming with no messages returns zeros.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + result = await transcript.add_messages_streaming(_async_iter([])) + + assert result.messages_added == 0 + assert result.semrefs_added == 0 + + await storage.close() + + +@pytest.mark.asyncio +async def test_streaming_all_skipped_batch() -> None: + """A batch where all messages are already ingested produces no commit.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await 
_create_transcript(db_path) + + # Pre-mark all sources + async with storage: + for i in range(3): + await storage.mark_source_ingested(f"s-{i}") + + msgs = [_make_message(f"msg-{i}", source_id=f"s-{i}") for i in range(3)] + result = await transcript.add_messages_streaming(_async_iter(msgs)) + + assert result.messages_added == 0 + assert await transcript.messages.size() == 0 + + await storage.close() diff --git a/tests/test_add_messages_with_indexing.py b/tests/test_add_messages_with_indexing.py index 4f00cfb1..d3df2c4d 100644 --- a/tests/test_add_messages_with_indexing.py +++ b/tests/test_add_messages_with_indexing.py @@ -8,7 +8,7 @@ import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.storage.sqlite.provider import SqliteStorageProvider from typeagent.transcripts.transcript import ( @@ -24,7 +24,7 @@ async def test_add_messages_with_indexing_basic(): with tempfile.TemporaryDirectory() as tmpdir: db_path = os.path.join(tmpdir, "test.db") - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) settings.semantic_ref_index_settings.auto_extract_knowledge = False @@ -65,7 +65,7 @@ async def test_add_messages_with_indexing_batched(): with tempfile.TemporaryDirectory() as tmpdir: db_path = os.path.join(tmpdir, "test.db") - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) settings.semantic_ref_index_settings.auto_extract_knowledge = False @@ -122,7 +122,7 @@ async def test_transaction_rollback_on_error(): with tempfile.TemporaryDirectory() as tmpdir: db_path = os.path.join(tmpdir, "test.db") - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = 
create_test_embedding_model() settings = ConversationSettings(model=test_model) settings.semantic_ref_index_settings.auto_extract_knowledge = False diff --git a/tests/test_answers.py b/tests/test_answers.py new file mode 100644 index 00000000..888924b2 --- /dev/null +++ b/tests/test_answers.py @@ -0,0 +1,192 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +from collections.abc import AsyncGenerator + +import pytest +import pytest_asyncio + +from typeagent.knowpro.answers import ( + facets_to_merged_facets, + get_enclosing_date_range_for_text_range, + get_enclosing_text_range, + merged_facets_to_facets, + text_range_from_message_range, +) +from typeagent.knowpro.interfaces import TextLocation, TextRange +from typeagent.knowpro.knowledge_schema import Facet + +from conftest import FakeMessage, FakeMessageCollection + +# --------------------------------------------------------------------------- +# Change 1: facets_to_merged_facets uses str(facet.value), not str(facet) +# --------------------------------------------------------------------------- + + +class TestFacetsToMergedFacets: + """Verify that facet *values* (not the whole Facet object) are stringified.""" + + def test_string_value(self) -> None: + facets = [Facet(name="colour", value="red")] + merged = facets_to_merged_facets(facets) + assert merged == {"colour": ["red"]} + + def test_numeric_value(self) -> None: + facets = [Facet(name="age", value=30.0)] + merged = facets_to_merged_facets(facets) + # Should be "30.0", NOT "Facet('age', 30.0)" + assert merged == {"age": ["30.0"]} + assert "Facet" not in merged["age"][0] + + def test_bool_value(self) -> None: + facets = [Facet(name="active", value=True)] + merged = facets_to_merged_facets(facets) + assert merged == {"active": ["true"]} + + def test_multiple_facets_same_name(self) -> None: + facets = [ + Facet(name="tag", value="a"), + Facet(name="tag", value="b"), + ] + merged = facets_to_merged_facets(facets) + assert merged == 
{"tag": ["a", "b"]} + + def test_lowercases_names_and_values(self) -> None: + facets = [Facet(name="Colour", value="RED")] + merged = facets_to_merged_facets(facets) + assert "colour" in merged + assert merged["colour"] == ["red"] + + def test_roundtrip_through_merged(self) -> None: + """facets_to_merged_facets -> merged_facets_to_facets preserves semantics.""" + original = [ + Facet(name="colour", value="red"), + Facet(name="colour", value="blue"), + Facet(name="size", value="large"), + ] + merged = facets_to_merged_facets(original) + restored = merged_facets_to_facets(merged) + restored_by_name = {f.name: f.value for f in restored} + assert restored_by_name["colour"] == "red; blue" + assert restored_by_name["size"] == "large" + + +# --------------------------------------------------------------------------- +# Change 2: get_enclosing_date_range_for_text_range uses ordinal-1 for end +# --------------------------------------------------------------------------- + + +class TestGetEnclosingDateRangeForTextRange: + """Verify the off-by-one fix: end is exclusive, so we subtract 1.""" + + @pytest_asyncio.fixture() + async def messages(self) -> AsyncGenerator[FakeMessageCollection, None]: + """Three messages with ordinals 0, 1, 2 and timestamps derived from them.""" + coll = FakeMessageCollection() + for i in range(3): + msg = FakeMessage("text", message_ordinal=i) + await coll.append(msg) + yield coll + + @pytest.mark.asyncio + async def test_single_message_range(self, messages: FakeMessageCollection) -> None: + """Point range (end=None) should use only the start message's timestamp.""" + tr = TextRange(start=TextLocation(1)) + dr = await get_enclosing_date_range_for_text_range(messages, tr) + assert dr is not None + assert dr.start.hour == 1 + assert dr.end is None + + @pytest.mark.asyncio + async def test_multi_message_range_uses_exclusive_end( + self, messages: FakeMessageCollection + ) -> None: + """Range [0, 2) should use message 2 (the exclusive end) for end 
timestamp.""" + tr = TextRange( + start=TextLocation(0), + end=TextLocation(2), # exclusive end + ) + dr = await get_enclosing_date_range_for_text_range(messages, tr) + assert dr is not None + assert dr.start.hour == 0 + # End timestamp comes from the message at the exclusive end ordinal: + assert dr.end is not None + assert dr.end.hour == 2 + + @pytest.mark.asyncio + async def test_adjacent_messages(self, messages: FakeMessageCollection) -> None: + """Range [1, 2) covers only message 1; end timestamp is message 2.""" + tr = TextRange( + start=TextLocation(1), + end=TextLocation(2), + ) + dr = await get_enclosing_date_range_for_text_range(messages, tr) + assert dr is not None + assert dr.start.hour == 1 + assert dr.end is not None + assert dr.end.hour == 2 # exclusive end: timestamp of the next message + + @pytest.mark.asyncio + async def test_end_past_last_message(self, messages: FakeMessageCollection) -> None: + """If the exclusive end ordinal is past the last message, end is None.""" + tr = TextRange( + start=TextLocation(0), + end=TextLocation(3), # messages only have ordinals 0, 1, 2 + ) + dr = await get_enclosing_date_range_for_text_range(messages, tr) + assert dr is not None + assert dr.start.hour == 0 + assert dr.end is None + + @pytest.mark.asyncio + async def test_no_timestamp_returns_none(self) -> None: + """If start message has no timestamp, return None.""" + coll = FakeMessageCollection() + msg = FakeMessage("text") # no message_ordinal → no timestamp + await coll.append(msg) + tr = TextRange(start=TextLocation(0)) + dr = await get_enclosing_date_range_for_text_range(coll, tr) + assert dr is None + + +# --------------------------------------------------------------------------- +# Helper functions (also exercised for completeness) +# --------------------------------------------------------------------------- + + +class TestGetEnclosingTextRange: + def test_single_ordinal(self) -> None: + tr = get_enclosing_text_range([5]) + assert tr is not None + 
assert tr.start.message_ordinal == 5 + assert tr.end is None # point range + + def test_multiple_ordinals(self) -> None: + tr = get_enclosing_text_range([3, 1, 7]) + assert tr is not None + assert tr.start.message_ordinal == 1 + assert tr.end is not None + assert tr.end.message_ordinal == 7 + + def test_empty_ordinals(self) -> None: + tr = get_enclosing_text_range([]) + assert tr is None + + +class TestTextRangeFromMessageRange: + def test_point(self) -> None: + tr = text_range_from_message_range(3, 3) + assert tr is not None + assert tr.start.message_ordinal == 3 + assert tr.end is None + + def test_range(self) -> None: + tr = text_range_from_message_range(2, 5) + assert tr is not None + assert tr.start.message_ordinal == 2 + assert tr.end is not None + assert tr.end.message_ordinal == 5 + + def test_invalid_raises(self) -> None: + with pytest.raises(ValueError, match="Expect message ordinal range"): + text_range_from_message_range(5, 2) diff --git a/tests/test_collections.py b/tests/test_collections.py index f35c3fe9..0f61e183 100644 --- a/tests/test_collections.py +++ b/tests/test_collections.py @@ -25,7 +25,7 @@ TextLocation, TextRange, ) -from typeagent.knowpro.kplib import Action, ConcreteEntity +from typeagent.knowpro.knowledge_schema import Action, ConcreteEntity from typeagent.storage.memory.collections import MemorySemanticRefCollection @@ -110,11 +110,15 @@ def test_text_range_collection_add_and_check(): assert len(collection) == 2 - assert collection.is_in_range(range1) is True - assert collection.is_in_range(range2) is True - assert collection.is_in_range(range3) is False - assert collection.is_in_range(range4) is False - assert collection.is_in_range(range5) is False + assert collection.contains_range(range1) is True + assert collection.contains_range(range2) is True + assert ( + collection.contains_range(range3) is True + ) # range3 [5,10) is inside range1 [0,10) + assert ( + collection.contains_range(range4) is False + ) # range4 [5,25) spans across 
ranges + assert collection.contains_range(range5) is False def test_text_ranges_in_scope(): @@ -406,6 +410,90 @@ def test_match_accumulator_select_top_n_scoring(): assert matches[1].value == "medium" +def test_match_accumulator_add_non_exact_match(): + """Non-exact (related) matches must start with hit_count=0.""" + accumulator = MatchAccumulator[str]() + accumulator.add("related_term", score=0.7, is_exact_match=False) + + match = accumulator.get_match("related_term") + assert match is not None + assert match.hit_count == 0 + assert match.score == 0.0 + assert match.related_hit_count == 1 + assert match.related_score == 0.7 + + +def test_match_accumulator_non_exact_filtered_by_min_hit_count(): + """Related-only matches should be excluded by min_hit_count=1 filter.""" + accumulator = MatchAccumulator[str]() + accumulator.add("exact_term", score=1.0, is_exact_match=True) + accumulator.add("related_term", score=0.9, is_exact_match=False) + + matches = list(accumulator._matches_with_min_hit_count(min_hit_count=1)) # type: ignore + assert len(matches) == 1 + assert matches[0].value == "exact_term" + + +def test_match_accumulator_related_then_exact_same_value(): + """Adding a related match then an exact match for the same value.""" + accumulator = MatchAccumulator[str]() + accumulator.add("term", score=0.5, is_exact_match=False) + accumulator.add("term", score=1.0, is_exact_match=True) + + match = accumulator.get_match("term") + assert match is not None + assert match.hit_count == 1 + assert match.score == 1.0 + assert match.related_hit_count == 1 + assert match.related_score == 0.5 + + +def test_match_accumulator_exact_then_related_same_value(): + """Adding an exact match then a related match for the same value.""" + accumulator = MatchAccumulator[str]() + accumulator.add("term", score=1.0, is_exact_match=True) + accumulator.add("term", score=0.3, is_exact_match=False) + + match = accumulator.get_match("term") + assert match is not None + assert match.hit_count == 1 + 
assert match.score == 1.0 + assert match.related_hit_count == 1 + assert match.related_score == 0.3 + + +def test_match_accumulator_multiple_related_accumulate(): + """Multiple related matches for the same value accumulate correctly.""" + accumulator = MatchAccumulator[str]() + accumulator.add("term", score=0.4, is_exact_match=False) + accumulator.add("term", score=0.6, is_exact_match=False) + + match = accumulator.get_match("term") + assert match is not None + assert match.hit_count == 0 + assert match.score == 0.0 + assert match.related_hit_count == 2 + assert match.related_score == pytest.approx(1.0) + + +def test_match_accumulator_total_score_includes_related(): + """calculate_total_score adds smoothed related score to the main score.""" + accumulator = MatchAccumulator[str]() + accumulator.add("exact_only", score=2.0, is_exact_match=True) + accumulator.add("mixed", score=1.0, is_exact_match=True) + accumulator.add("mixed", score=0.5, is_exact_match=False) + + accumulator.calculate_total_score() + + exact_only = accumulator.get_match("exact_only") + mixed = accumulator.get_match("mixed") + assert exact_only is not None + assert mixed is not None + # "mixed" should have a higher score than its raw 1.0 + # because the related_score of 0.5 is added (smoothed). 
+ assert mixed.score > 1.0 + + def test_get_smooth_score(): """Test calculating smooth scores.""" assert get_smooth_score(10.0, 1) == 10.0 # Single hit count, no smoothing diff --git a/tests/test_conversation_metadata.py b/tests/test_conversation_metadata.py index b44583c2..452288bb 100644 --- a/tests/test_conversation_metadata.py +++ b/tests/test_conversation_metadata.py @@ -3,7 +3,6 @@ """Tests for conversation metadata operations in SQLite storage provider.""" -import asyncio from collections.abc import AsyncGenerator from dataclasses import field from datetime import datetime, timezone @@ -17,7 +16,8 @@ from pydantic.dataclasses import dataclass -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.embeddings import IEmbeddingModel +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import ( ConversationSettings, @@ -25,7 +25,7 @@ RelatedTermIndexSettings, ) from typeagent.knowpro.interfaces import ConversationMetadata, IMessage -from typeagent.knowpro.kplib import KnowledgeResponse +from typeagent.knowpro.knowledge_schema import KnowledgeResponse from typeagent.storage.sqlite.provider import SqliteStorageProvider from typeagent.transcripts.transcript import ( Transcript, @@ -55,7 +55,7 @@ def get_knowledge(self) -> KnowledgeResponse: @pytest_asyncio.fixture async def storage_provider( - temp_db_path: str, embedding_model: AsyncEmbeddingModel + temp_db_path: str, embedding_model: IEmbeddingModel ) -> AsyncGenerator[SqliteStorageProvider[DummyMessage], None]: """Create a SqliteStorageProvider for testing conversation metadata.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -77,7 +77,7 @@ async def storage_provider_memory() -> ( AsyncGenerator[SqliteStorageProvider[DummyMessage], None] ): """Create an in-memory SqliteStorageProvider for testing conversation 
metadata.""" - embedding_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + embedding_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(embedding_model) message_text_settings = MessageTextIndexSettings(embedding_settings) related_terms_settings = RelatedTermIndexSettings(embedding_settings) @@ -95,97 +95,99 @@ async def storage_provider_memory() -> ( class TestConversationMetadata: """Test conversation metadata operations.""" - def test_get_conversation_metadata_nonexistent( + @pytest.mark.asyncio + async def test_get_conversation_metadata_nonexistent( self, storage_provider: SqliteStorageProvider[DummyMessage] ): """Test getting metadata before any writes returns empty metadata.""" - metadata = storage_provider.get_conversation_metadata() + metadata = await storage_provider.get_conversation_metadata() # Metadata is not initialized until first write, so all fields are None assert metadata.name_tag is None assert metadata.schema_version is None assert metadata.created_at is None assert metadata.updated_at is None - assert metadata.embedding_size is None assert metadata.embedding_model is None assert metadata.tags is None assert metadata.extra is None - def test_update_conversation_timestamps_new( + @pytest.mark.asyncio + async def test_update_conversation_timestamps_new( self, storage_provider: SqliteStorageProvider[DummyMessage] ): """Test updating conversation metadata timestamps.""" created_at = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) updated_at = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps( + await storage_provider.update_conversation_timestamps( created_at=created_at, updated_at=updated_at, ) - metadata = storage_provider.get_conversation_metadata() + metadata = await storage_provider.get_conversation_metadata() assert metadata is not None assert metadata.name_tag == "conversation" assert metadata.schema_version == 1 assert metadata.created_at == 
created_at assert metadata.updated_at == updated_at settings = storage_provider.message_text_index_settings.embedding_index_settings - expected_size = settings.embedding_size expected_model = settings.embedding_model.model_name - assert metadata.embedding_size == expected_size assert metadata.embedding_model == expected_model assert metadata.tags is None assert metadata.extra is None - def test_update_conversation_timestamps_existing( + @pytest.mark.asyncio + async def test_update_conversation_timestamps_existing( self, storage_provider: SqliteStorageProvider[DummyMessage] ): """Test updating existing conversation metadata.""" # Create initial metadata initial_created = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) initial_updated = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps( + await storage_provider.update_conversation_timestamps( created_at=initial_created, updated_at=initial_updated, ) # Update only the updated_at timestamp new_updated = datetime(2024, 1, 2, 15, 30, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps(updated_at=new_updated) + await storage_provider.update_conversation_timestamps(updated_at=new_updated) - metadata = storage_provider.get_conversation_metadata() + metadata = await storage_provider.get_conversation_metadata() assert metadata is not None assert metadata.created_at == initial_created # Unchanged assert metadata.updated_at == new_updated # Changed - def test_update_conversation_timestamps_partial_created_at( + @pytest.mark.asyncio + async def test_update_conversation_timestamps_partial_created_at( self, storage_provider: SqliteStorageProvider[DummyMessage] ): """Test updating only created_at of existing conversation metadata.""" # Create initial metadata initial_created = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) initial_updated = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps( + await 
storage_provider.update_conversation_timestamps( created_at=initial_created, updated_at=initial_updated, ) # Update only the created_at timestamp new_created = datetime(2023, 12, 1, 10, 0, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps(created_at=new_created) + await storage_provider.update_conversation_timestamps(created_at=new_created) - metadata = storage_provider.get_conversation_metadata() + metadata = await storage_provider.get_conversation_metadata() assert metadata is not None assert metadata.created_at == new_created # Changed assert metadata.updated_at == initial_updated # Unchanged - def test_update_conversation_timestamps_both_timestamps( + @pytest.mark.asyncio + async def test_update_conversation_timestamps_both_timestamps( self, storage_provider: SqliteStorageProvider[DummyMessage] ): """Test updating both timestamps of existing conversation metadata.""" # Create initial metadata initial_created = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) initial_updated = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps( + await storage_provider.update_conversation_timestamps( created_at=initial_created, updated_at=initial_updated, ) @@ -193,54 +195,56 @@ def test_update_conversation_timestamps_both_timestamps( # Update both timestamps new_created = datetime(2023, 12, 1, 10, 0, 0, tzinfo=timezone.utc) new_updated = datetime(2024, 1, 2, 15, 30, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps( + await storage_provider.update_conversation_timestamps( created_at=new_created, updated_at=new_updated, ) - metadata = storage_provider.get_conversation_metadata() + metadata = await storage_provider.get_conversation_metadata() assert metadata is not None assert metadata.created_at == new_created assert metadata.updated_at == new_updated - def test_update_conversation_timestamps_no_params( + @pytest.mark.asyncio + async def 
test_update_conversation_timestamps_no_params( self, storage_provider: SqliteStorageProvider[DummyMessage] ): """Test calling update with no parameters on existing conversation.""" # Create initial metadata initial_created = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) initial_updated = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps( + await storage_provider.update_conversation_timestamps( created_at=initial_created, updated_at=initial_updated, ) # Call update with no parameters - should not change anything - storage_provider.update_conversation_timestamps() + await storage_provider.update_conversation_timestamps() - metadata = storage_provider.get_conversation_metadata() + metadata = await storage_provider.get_conversation_metadata() assert metadata is not None assert metadata.created_at == initial_created assert metadata.updated_at == initial_updated - def test_update_conversation_timestamps_none_values( + @pytest.mark.asyncio + async def test_update_conversation_timestamps_none_values( self, storage_provider: SqliteStorageProvider[DummyMessage] ): """Test updating with explicit None values.""" # Create initial metadata initial_created = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) initial_updated = datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc) - storage_provider.update_conversation_timestamps( + await storage_provider.update_conversation_timestamps( created_at=initial_created, updated_at=initial_updated, ) # Update with None values - should not change anything - storage_provider.update_conversation_timestamps( + await storage_provider.update_conversation_timestamps( created_at=None, updated_at=None ) - metadata = storage_provider.get_conversation_metadata() + metadata = await storage_provider.get_conversation_metadata() assert metadata is not None assert metadata.created_at == initial_created assert metadata.updated_at == initial_updated @@ -264,7 +268,7 @@ def 
test_get_db_version_with_metadata( @pytest.mark.asyncio async def test_multiple_conversations_different_dbs( - self, embedding_model: AsyncEmbeddingModel + self, embedding_model: IEmbeddingModel ): """Test multiple conversations in different database files.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -302,19 +306,19 @@ async def test_multiple_conversations_different_dbs( try: # Update timestamps for both conversations - provider1.update_conversation_timestamps( + await provider1.update_conversation_timestamps( created_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), updated_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), ) - provider2.update_conversation_timestamps( + await provider2.update_conversation_timestamps( created_at=datetime(2024, 1, 2, 14, 0, 0, tzinfo=timezone.utc), updated_at=datetime(2024, 1, 2, 14, 0, 0, tzinfo=timezone.utc), ) # Verify each conversation sees its own metadata - read_metadata1 = provider1.get_conversation_metadata() - read_metadata2 = provider2.get_conversation_metadata() + read_metadata1 = await provider1.get_conversation_metadata() + read_metadata2 = await provider2.get_conversation_metadata() assert read_metadata1.name_tag == "conversation_conv1" assert read_metadata2.name_tag == "conversation_conv2" @@ -337,7 +341,7 @@ async def test_multiple_conversations_different_dbs( @pytest.mark.asyncio async def test_conversation_metadata_single_per_db( - self, temp_db_path: str, embedding_model: AsyncEmbeddingModel + self, temp_db_path: str, embedding_model: IEmbeddingModel ): """Test that only one conversation metadata can exist per database.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -366,15 +370,15 @@ async def test_conversation_metadata_single_per_db( try: # Write metadata with alpha provider (first write wins) - provider_alpha.update_conversation_timestamps( + await provider_alpha.update_conversation_timestamps( created_at=datetime(2024, 1, 1, 12, 0, 0, 
tzinfo=timezone.utc), updated_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), ) provider_alpha.db.commit() # Both providers should see the same metadata since it's the same DB - alpha_metadata = provider_alpha.get_conversation_metadata() - beta_metadata = provider_beta.get_conversation_metadata() + alpha_metadata = await provider_alpha.get_conversation_metadata() + beta_metadata = await provider_beta.get_conversation_metadata() # They should be the same since there's only one metadata row per DB assert alpha_metadata.name_tag == "conversation_alpha" # First write wins @@ -385,7 +389,8 @@ async def test_conversation_metadata_single_per_db( await provider_alpha.close() await provider_beta.close() - def test_conversation_metadata_with_special_characters( + @pytest.mark.asyncio + async def test_conversation_metadata_with_special_characters( self, storage_provider: SqliteStorageProvider[DummyMessage] ): """Test conversation metadata with special characters in timestamps.""" @@ -400,18 +405,18 @@ def test_conversation_metadata_with_special_characters( # Convert test timestamps to datetime objects for the API for timestamp_str in test_timestamps: timestamp_dt = parse_iso_datetime(timestamp_str) - storage_provider.update_conversation_timestamps( + await storage_provider.update_conversation_timestamps( created_at=timestamp_dt, updated_at=timestamp_dt ) - metadata = storage_provider.get_conversation_metadata() + metadata = await storage_provider.get_conversation_metadata() assert metadata is not None assert metadata.created_at == timestamp_dt assert metadata.updated_at == timestamp_dt @pytest.mark.asyncio async def test_conversation_metadata_persistence( - self, temp_db_path: str, embedding_model: AsyncEmbeddingModel + self, temp_db_path: str, embedding_model: IEmbeddingModel ): """Test that conversation metadata persists across provider instances.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -431,7 +436,7 @@ async def 
test_conversation_metadata_persistence( metadata=metadata_input, ) - provider1.update_conversation_timestamps( + await provider1.update_conversation_timestamps( created_at=created_at, updated_at=updated_at, ) @@ -446,13 +451,11 @@ async def test_conversation_metadata_persistence( ) try: - metadata = provider2.get_conversation_metadata() + metadata = await provider2.get_conversation_metadata() assert metadata.name_tag == "conversation_persistent_test" assert metadata.created_at == created_at assert metadata.updated_at == updated_at - expected_size = embedding_settings.embedding_size expected_model = embedding_settings.embedding_model.model_name - assert metadata.embedding_size == expected_size assert metadata.embedding_model == expected_model finally: await provider2.close() @@ -461,15 +464,16 @@ async def test_conversation_metadata_persistence( class TestConversationMetadataEdgeCases: """Test edge cases for conversation metadata operations.""" - def test_empty_string_timestamps( + @pytest.mark.asyncio + async def test_empty_string_timestamps( self, storage_provider_memory: SqliteStorageProvider[DummyMessage] ): """Test behavior with None timestamps (should remain None).""" - storage_provider_memory.update_conversation_timestamps( + await storage_provider_memory.update_conversation_timestamps( created_at=None, updated_at=None ) - metadata = storage_provider_memory.get_conversation_metadata() + metadata = await storage_provider_memory.get_conversation_metadata() # Calling with None values creates a row but leaves timestamps None assert metadata.name_tag == "conversation" assert metadata.schema_version == 1 @@ -478,7 +482,7 @@ def test_empty_string_timestamps( @pytest.mark.asyncio async def test_very_long_name_tag( - self, temp_db_path: str, embedding_model: AsyncEmbeddingModel + self, temp_db_path: str, embedding_model: IEmbeddingModel ): """Test conversation metadata with very long name_tag.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ 
-497,19 +501,19 @@ async def test_very_long_name_tag( ) try: - provider.update_conversation_timestamps( + await provider.update_conversation_timestamps( created_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), updated_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), ) - metadata = provider.get_conversation_metadata() + metadata = await provider.get_conversation_metadata() assert metadata.name_tag == long_name finally: await provider.close() @pytest.mark.asyncio async def test_unicode_name_tag( - self, temp_db_path: str, embedding_model: AsyncEmbeddingModel + self, temp_db_path: str, embedding_model: IEmbeddingModel ): """Test conversation metadata with Unicode name_tag.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -528,19 +532,19 @@ async def test_unicode_name_tag( ) try: - provider.update_conversation_timestamps( + await provider.update_conversation_timestamps( created_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), updated_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), ) - metadata = provider.get_conversation_metadata() + metadata = await provider.get_conversation_metadata() assert metadata.name_tag == unicode_name finally: await provider.close() @pytest.mark.asyncio async def test_conversation_metadata_shared_access( - self, temp_db_path: str, embedding_model: AsyncEmbeddingModel + self, temp_db_path: str, embedding_model: IEmbeddingModel ): """Test shared access to metadata using the same database file.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -564,21 +568,21 @@ async def test_conversation_metadata_shared_access( try: # Update from provider1 - provider1.update_conversation_timestamps( + await provider1.update_conversation_timestamps( created_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), updated_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), ) provider1.db.commit() # Update from provider2 - should update the same metadata row - 
provider2.update_conversation_timestamps( + await provider2.update_conversation_timestamps( updated_at=datetime(2024, 1, 1, 13, 0, 0, tzinfo=timezone.utc) ) provider2.db.commit() # Both should see the latest state - metadata1 = provider1.get_conversation_metadata() - metadata2 = provider2.get_conversation_metadata() + metadata1 = await provider1.get_conversation_metadata() + metadata2 = await provider2.get_conversation_metadata() assert metadata1 is not None assert metadata2 is not None @@ -589,51 +593,9 @@ async def test_conversation_metadata_shared_access( await provider1.close() await provider2.close() - def test_embedding_metadata_mismatch_raises( - self, temp_db_path: str, embedding_model: AsyncEmbeddingModel - ): - """Ensure a mismatch between stored metadata and provided settings raises.""" - embedding_settings = TextEmbeddingIndexSettings(embedding_model) - message_text_settings = MessageTextIndexSettings(embedding_settings) - related_terms_settings = RelatedTermIndexSettings(embedding_settings) - - provider = SqliteStorageProvider( - db_path=temp_db_path, - message_type=DummyMessage, - message_text_index_settings=message_text_settings, - related_term_index_settings=related_terms_settings, - ) - - provider.update_conversation_timestamps( - created_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), - updated_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), - ) - provider.db.commit() - asyncio.run(provider.close()) - - mismatched_model = AsyncEmbeddingModel( - embedding_size=embedding_settings.embedding_size + 1, - model_name=embedding_model.model_name, - ) - mismatched_settings = TextEmbeddingIndexSettings( - embedding_model=mismatched_model, - embedding_size=mismatched_model.embedding_size, - ) - - with pytest.raises(ValueError, match="embedding_size"): - SqliteStorageProvider( - db_path=temp_db_path, - message_type=DummyMessage, - message_text_index_settings=MessageTextIndexSettings( - mismatched_settings - ), - 
related_term_index_settings=RelatedTermIndexSettings( - mismatched_settings - ), - ) - - def test_embedding_model_mismatch_raises( - self, temp_db_path: str, embedding_model: AsyncEmbeddingModel + @pytest.mark.asyncio + async def test_embedding_model_mismatch_raises( + self, temp_db_path: str, embedding_model: IEmbeddingModel ): """Ensure providing a different embedding model name raises.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -647,12 +609,12 @@ def test_embedding_model_mismatch_raises( related_term_index_settings=related_terms_settings, ) - provider.update_conversation_timestamps( + await provider.update_conversation_timestamps( created_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), updated_at=datetime(2024, 1, 1, 12, 0, 0, tzinfo=timezone.utc), ) provider.db.commit() - asyncio.run(provider.close()) + await provider.close() with sqlite3.connect(temp_db_path) as conn: conn.execute( @@ -673,7 +635,7 @@ def test_embedding_model_mismatch_raises( @pytest.mark.asyncio async def test_updated_at_changes_on_add_messages( - self, temp_db_path: str, embedding_model: AsyncEmbeddingModel + self, temp_db_path: str, embedding_model: IEmbeddingModel ): """Test that updated_at timestamp is updated when messages are added.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -689,7 +651,7 @@ async def test_updated_at_changes_on_add_messages( try: # Get initial metadata (should be empty due to lazy initialization) - initial_metadata = provider.get_conversation_metadata() + initial_metadata = await provider.get_conversation_metadata() initial_updated_at = initial_metadata.updated_at assert initial_updated_at is None # No writes yet @@ -713,7 +675,7 @@ async def test_updated_at_changes_on_add_messages( await transcript.add_messages_with_indexing(messages) # Get updated metadata (should now have timestamps) - updated_metadata = provider.get_conversation_metadata() + updated_metadata = await provider.get_conversation_metadata() 
updated_updated_at = updated_metadata.updated_at # The updated_at timestamp should now be set diff --git a/tests/test_convthreads.py b/tests/test_convthreads.py new file mode 100644 index 00000000..e4d5e2d5 --- /dev/null +++ b/tests/test_convthreads.py @@ -0,0 +1,122 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Tests for storage/memory/convthreads.py.""" + +import pytest + +from typeagent.aitools.model_adapters import create_test_embedding_model +from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings +from typeagent.knowpro.interfaces import TextLocation, TextRange, Thread +from typeagent.knowpro.interfaces_serialization import ConversationThreadData +from typeagent.storage.memory.convthreads import ConversationThreads + + +@pytest.fixture +def settings() -> TextEmbeddingIndexSettings: + return TextEmbeddingIndexSettings(create_test_embedding_model()) + + +@pytest.fixture +def threads(settings: TextEmbeddingIndexSettings) -> ConversationThreads: + return ConversationThreads(settings) + + +def make_thread(description: str, start: int = 0, end: int = 1) -> Thread: + return Thread( + description=description, + ranges=[ + TextRange(start=TextLocation(start), end=TextLocation(end)), + ], + ) + + +@pytest.mark.asyncio +async def test_add_thread_appends(threads: ConversationThreads) -> None: + await threads.add_thread(make_thread("topic one")) + assert len(threads.threads) == 1 + assert threads.threads[0].description == "topic one" + + +@pytest.mark.asyncio +async def test_add_multiple_threads(threads: ConversationThreads) -> None: + await threads.add_thread(make_thread("alpha")) + await threads.add_thread(make_thread("beta")) + await threads.add_thread(make_thread("gamma")) + assert len(threads.threads) == 3 + + +@pytest.mark.asyncio +async def test_clear_resets_state(threads: ConversationThreads) -> None: + await threads.add_thread(make_thread("something")) + threads.clear() + assert len(threads.threads) == 0 + assert 
len(threads.vector_base) == 0 + + +@pytest.mark.asyncio +async def test_build_index_rebuilds_from_threads(threads: ConversationThreads) -> None: + # Manually add threads without building the vector index. + t1 = make_thread("python programming") + t2 = make_thread("data science") + threads.threads.append(t1) + threads.threads.append(t2) + # build_index should embed all existing threads. + await threads.build_index() + assert len(threads.vector_base) == 2 + + +@pytest.mark.asyncio +async def test_serialize_roundtrip(threads: ConversationThreads) -> None: + await threads.add_thread(make_thread("episode one", 0, 5)) + await threads.add_thread(make_thread("episode two", 5, 10)) + + data = threads.serialize() + assert "threads" in data + thread_list = data["threads"] + assert thread_list is not None + assert len(thread_list) == 2 + + # Deserialize into a fresh instance. + settings = TextEmbeddingIndexSettings(create_test_embedding_model()) + fresh = ConversationThreads(settings) + fresh.deserialize(data) + assert len(fresh.threads) == 2 + assert fresh.threads[0].description == "episode one" + assert fresh.threads[1].description == "episode two" + + +@pytest.mark.asyncio +async def test_deserialize_empty_data(threads: ConversationThreads) -> None: + data: ConversationThreadData = {} # type: ignore[typeddict-item] + threads.deserialize(data) + assert len(threads.threads) == 0 + + +@pytest.mark.asyncio +async def test_serialize_without_embeddings(threads: ConversationThreads) -> None: + # Add a thread without going through add_thread (so no embedding yet). + threads.threads.append(make_thread("bare thread")) + data = threads.serialize() + thread_list = data["threads"] + assert thread_list is not None + assert len(thread_list) == 1 + # Embedding may be None because vector_base has no entries for this slot. 
+ assert thread_list[0]["embedding"] is None or isinstance( + thread_list[0]["embedding"], list + ) + + +@pytest.mark.asyncio +async def test_lookup_thread_returns_matches(threads: ConversationThreads) -> None: + await threads.add_thread(make_thread("machine learning and AI")) + await threads.add_thread(make_thread("cooking recipes")) + results = await threads.lookup_thread("artificial intelligence") + assert len(results) > 0 + assert results[0].thread_ordinal == 0 # ordinal of the matching thread + + +@pytest.mark.asyncio +async def test_lookup_thread_empty_index(threads: ConversationThreads) -> None: + results = await threads.lookup_thread("anything") + assert results == [] diff --git a/tests/test_convutils.py b/tests/test_convutils.py new file mode 100644 index 00000000..b9ac654c --- /dev/null +++ b/tests/test_convutils.py @@ -0,0 +1,60 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +import pytest + +from typeagent.knowpro.convutils import ( + get_time_range_for_conversation, + get_time_range_prompt_section_for_conversation, +) + +from conftest import FakeConversation, FakeMessage + + +class TestGetTimeRangeForConversation: + @pytest.mark.asyncio + async def test_empty_conversation_returns_none(self) -> None: + conv = FakeConversation(messages=[]) + result = await get_time_range_for_conversation(conv) + assert result is None + + @pytest.mark.asyncio + async def test_message_without_timestamp_returns_none(self) -> None: + msg = FakeMessage("hello") # no message_ordinal → timestamp=None + conv = FakeConversation(messages=[msg]) + result = await get_time_range_for_conversation(conv) + assert result is None + + @pytest.mark.asyncio + async def test_single_message_with_timestamp(self) -> None: + msg = FakeMessage("hello", message_ordinal=0) + conv = FakeConversation(messages=[msg]) + result = await get_time_range_for_conversation(conv) + assert result is not None + assert result.start.isoformat().startswith("2020-01-01T00") + + 
@pytest.mark.asyncio + async def test_multiple_messages_range_start_end(self) -> None: + msgs = [FakeMessage(f"msg{i}", message_ordinal=i) for i in range(3)] + conv = FakeConversation(messages=msgs) + result = await get_time_range_for_conversation(conv) + assert result is not None + assert result.start < result.end # type: ignore[operator] + + +class TestGetTimeRangePromptSection: + @pytest.mark.asyncio + async def test_no_timestamps_returns_none(self) -> None: + conv = FakeConversation(messages=[FakeMessage("hello")]) + result = await get_time_range_prompt_section_for_conversation(conv) + assert result is None + + @pytest.mark.asyncio + async def test_with_timestamps_returns_prompt_section(self) -> None: + msgs = [FakeMessage(f"msg{i}", message_ordinal=i) for i in range(2)] + conv = FakeConversation(messages=msgs) + result = await get_time_range_prompt_section_for_conversation(conv) + assert result is not None + assert result["role"] == "system" + assert "CONVERSATION TIME RANGE" in result["content"] + assert "2020-01-01" in result["content"] diff --git a/tests/test_demo.py b/tests/test_demo.py index 599f006e..39f2f061 100644 --- a/tests/test_demo.py +++ b/tests/test_demo.py @@ -6,7 +6,7 @@ import textwrap import time -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import IEmbeddingModel from typeagent.knowpro.convsettings import ConversationSettings from typeagent.knowpro.interfaces import ScoredSemanticRefOrdinal from typeagent.podcasts import podcast @@ -33,7 +33,7 @@ async def main(filename_prefix: str): settings = ConversationSettings() model = settings.embedding_model assert model is not None - assert isinstance(model, AsyncEmbeddingModel), f"model is {model!r}" + assert isinstance(model, IEmbeddingModel), f"model is {model!r}" assert settings.thread_settings.embedding_model is model assert ( settings.message_text_index_settings.embedding_index_settings.embedding_model diff --git a/tests/test_email_import.py 
b/tests/test_email_import.py new file mode 100644 index 00000000..371136bc --- /dev/null +++ b/tests/test_email_import.py @@ -0,0 +1,102 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +from typeagent.emails.email_import import ( + _merge_chunks, + _split_into_paragraphs, + _text_to_chunks, +) + + +class TestMergeChunks: + """Tests for _merge_chunks, specifically the separator-on-empty-chunk fix.""" + + def test_no_leading_separator(self) -> None: + """First chunk must NOT start with the separator.""" + result = list(_merge_chunks(["hello", "world"], "\n\n", 100)) + assert len(result) == 1 + assert result[0] == "hello\n\nworld" + assert not result[0].startswith("\n") + + def test_no_leading_separator_after_yield(self) -> None: + """After yielding a full chunk, the next chunk must not start with separator.""" + # Each piece is 5 chars; max_chunk_length=8 forces a split after each. + pieces = ["aaaaa", "bbbbb", "ccccc"] + result = list(_merge_chunks(pieces, "--", 8)) + for chunk in result: + assert not chunk.startswith("--"), f"chunk {chunk!r} starts with separator" + + def test_single_chunk(self) -> None: + result = list(_merge_chunks(["only"], "\n\n", 100)) + assert result == ["only"] + + def test_empty_input(self) -> None: + result = list(_merge_chunks([], "\n\n", 100)) + assert result == [] + + def test_exact_fit(self) -> None: + """Two chunks that fit exactly within max_chunk_length.""" + # "ab" + "\n\n" + "cd" = 6 chars + result = list(_merge_chunks(["ab", "cd"], "\n\n", 6)) + assert result == ["ab\n\ncd"] + + def test_overflow_splits(self) -> None: + """Chunks that don't fit together should be yielded separately.""" + # "ab" + "\n\n" + "cd" = 6 chars, max is 5 -> must split + result = list(_merge_chunks(["ab", "cd"], "\n\n", 5)) + assert result == ["ab", "cd"] + + def test_truncation_of_oversized_chunk(self) -> None: + """A single chunk longer than max_chunk_length is truncated.""" + result = list(_merge_chunks(["abcdefghij"], 
"\n\n", 5)) + assert result == ["abcde"] + + def test_multiple_merges_and_splits(self) -> None: + pieces = ["aa", "bb", "cccccc", "dd"] + # "aa" + "--" + "bb" = 6, fits in 8 + # "cccccc" alone = 6, can't merge with previous (6+2+6=14>8), yield "aa--bb" + # "cccccc" + "--" + "dd" = 10 > 8, yield "cccccc" + # "dd" yielded at end + result = list(_merge_chunks(pieces, "--", 8)) + assert result == ["aa--bb", "cccccc", "dd"] + + +class TestSplitIntoParagraphs: + def test_basic_split(self) -> None: + text = "para1\n\npara2\n\npara3" + assert _split_into_paragraphs(text) == ["para1", "para2", "para3"] + + def test_multiple_newlines(self) -> None: + text = "a\n\n\n\nb" + assert _split_into_paragraphs(text) == ["a", "b"] + + def test_no_split(self) -> None: + assert _split_into_paragraphs("single paragraph") == ["single paragraph"] + + def test_leading_trailing_newlines(self) -> None: + text = "\n\nfoo\n\n" + result = _split_into_paragraphs(text) + assert "foo" in result + assert "" not in result + + +class TestTextToChunks: + def test_short_text_single_chunk(self) -> None: + result = _text_to_chunks("short text", max_chunk_length=100) + assert result == ["short text"] + + def test_long_text_splits(self) -> None: + text = "para one\n\npara two\n\npara three" + result = _text_to_chunks(text, max_chunk_length=15) + assert len(result) > 1 + for chunk in result: + assert not chunk.startswith("\n"), f"chunk {chunk!r} has leading newline" + + def test_no_leading_separator_in_any_chunk(self) -> None: + """Regression: no chunk should start with the paragraph separator.""" + text = "A" * 50 + "\n\n" + "B" * 50 + "\n\n" + "C" * 50 + result = _text_to_chunks(text, max_chunk_length=60) + for chunk in result: + assert not chunk.startswith( + "\n\n" + ), f"chunk {chunk!r} has leading separator" diff --git a/tests/test_email_message.py b/tests/test_email_message.py new file mode 100644 index 00000000..17930486 --- /dev/null +++ b/tests/test_email_message.py @@ -0,0 +1,223 @@ +# Copyright 
(c) Microsoft Corporation. +# Licensed under the MIT License. + +from typeagent.emails.email_message import EmailMessage, EmailMessageMeta + + +def make_meta( + sender: str = "Alice ", + recipients: list[str] | None = None, + cc: list[str] | None = None, + bcc: list[str] | None = None, + subject: str | None = None, +) -> EmailMessageMeta: + return EmailMessageMeta( + sender=sender, + recipients=recipients or [], + cc=cc or [], + bcc=bcc or [], + subject=subject, + ) + + +class TestEmailMessageMetaProperties: + def test_source_returns_sender(self) -> None: + meta = make_meta(sender="bob@example.com") + assert meta.source == "bob@example.com" + + def test_dest_returns_recipients(self) -> None: + meta = make_meta(recipients=["a@b.com", "c@d.com"]) + assert meta.dest == ["a@b.com", "c@d.com"] + + def test_dest_empty_list(self) -> None: + meta = make_meta(recipients=[]) + assert meta.dest == [] + + +class TestEmailAddressToEntities: + def test_plain_address_no_display_name(self) -> None: + meta = make_meta() + entities = meta._email_address_to_entities("bob@example.com") + names = [e.name for e in entities] + assert "bob@example.com" in names + assert len(entities) == 1 + + def test_address_with_display_name(self) -> None: + meta = make_meta() + entities = meta._email_address_to_entities("Alice ") + names = [e.name for e in entities] + assert "Alice" in names + assert "alice@example.com" in names + assert len(entities) == 2 + + def test_display_name_entity_has_email_facet(self) -> None: + meta = make_meta() + entities = meta._email_address_to_entities("Alice ") + person_entity = next(e for e in entities if e.name == "Alice") + assert person_entity.facets is not None + assert len(person_entity.facets) == 1 + assert person_entity.facets[0].name == "email_address" + assert person_entity.facets[0].value == "alice@example.com" + + def test_display_name_only_no_address(self) -> None: + # parseaddr("Alice") returns ("", "Alice") — treated as address only + meta = make_meta() + 
entities = meta._email_address_to_entities("Alice") + # No display name, just the address "Alice" + assert len(entities) == 1 + assert entities[0].name == "Alice" + + +class TestToEntities: + def test_entities_include_sender(self) -> None: + meta = make_meta(sender="Alice ") + entities = meta.to_entities() + names = [e.name for e in entities] + assert "Alice" in names + assert "alice@example.com" in names + + def test_entities_include_recipient(self) -> None: + meta = make_meta( + sender="alice@example.com", + recipients=["Bob "], + ) + entities = meta.to_entities() + names = [e.name for e in entities] + assert "Bob" in names + assert "bob@example.com" in names + + def test_entities_include_cc(self) -> None: + meta = make_meta( + sender="a@x.com", + cc=["cc@example.com"], + ) + entities = meta.to_entities() + names = [e.name for e in entities] + assert "cc@example.com" in names + + def test_entities_include_bcc(self) -> None: + meta = make_meta( + sender="a@x.com", + bcc=["bcc@example.com"], + ) + entities = meta.to_entities() + names = [e.name for e in entities] + assert "bcc@example.com" in names + + def test_entities_always_include_email_message_entity(self) -> None: + meta = make_meta() + entities = meta.to_entities() + msg_entity = next((e for e in entities if e.name == "email"), None) + assert msg_entity is not None + assert "message" in msg_entity.type + + +class TestToTopics: + def test_no_subject_returns_empty(self) -> None: + meta = make_meta(subject=None) + assert meta.to_topics() == [] + + def test_subject_returned_as_topic(self) -> None: + meta = make_meta(subject="Hello World") + topics = meta.to_topics() + assert topics == ["Hello World"] + + +class TestToActions: + def test_no_recipients_returns_empty(self) -> None: + meta = make_meta(sender="alice@example.com", recipients=[]) + assert meta.to_actions() == [] + + def test_sent_and_received_actions_created(self) -> None: + meta = make_meta( + sender="Alice ", + recipients=["Bob "], + ) + actions = 
meta.to_actions() + verbs = [a.verbs[0] for a in actions] + assert "sent" in verbs + assert "received" in verbs + + def test_multiple_recipients_produce_actions(self) -> None: + meta = make_meta( + sender="alice@example.com", + recipients=["bob@example.com", "carol@example.com"], + ) + actions = meta.to_actions() + assert len(actions) > 0 + + def test_action_subject_is_sender(self) -> None: + meta = make_meta( + sender="alice@example.com", + recipients=["bob@example.com"], + ) + actions = meta.to_actions() + sent_actions = [a for a in actions if "sent" in a.verbs] + assert all(a.subject_entity_name == "alice@example.com" for a in sent_actions) + + +class TestGetKnowledge: + def test_get_knowledge_returns_response(self) -> None: + meta = make_meta( + sender="Alice ", + recipients=["Bob "], + subject="Test Subject", + ) + result = meta.get_knowledge() + assert result is not None + assert len(result.entities) > 0 + assert len(result.topics) > 0 + assert len(result.actions) > 0 + + +class TestEmailMessage: + def test_basic_construction(self) -> None: + meta = make_meta(sender="alice@example.com") + msg = EmailMessage( + text_chunks=["Hello world"], + metadata=meta, + ) + assert msg.text_chunks == ["Hello world"] + assert msg.metadata is meta + + def test_get_knowledge_delegates_to_metadata(self) -> None: + meta = make_meta( + sender="Alice ", + recipients=["bob@example.com"], + subject="Hi", + ) + msg = EmailMessage(text_chunks=["body"], metadata=meta) + result = msg.get_knowledge() + assert result is not None + + def test_add_timestamp(self) -> None: + meta = make_meta() + msg = EmailMessage(text_chunks=["body"], metadata=meta) + msg.add_timestamp("2025-01-01T00:00:00") + assert msg.timestamp == "2025-01-01T00:00:00" + + def test_add_content_empty_chunks(self) -> None: + meta = make_meta() + msg = EmailMessage(text_chunks=[], metadata=meta) + msg.add_content("new content") + assert msg.text_chunks == ["new content"] + + def test_add_content_existing_chunk(self) -> 
None: + meta = make_meta() + msg = EmailMessage(text_chunks=["existing"], metadata=meta) + msg.add_content(" more") + assert msg.text_chunks[0] == "existing more" + + def test_serialize_roundtrip(self) -> None: + meta = make_meta( + sender="Alice ", + recipients=["bob@example.com"], + subject="Hi", + ) + msg = EmailMessage(text_chunks=["Hello"], metadata=meta, tags=["work"]) + data = msg.serialize() + assert isinstance(data, dict) + restored = EmailMessage.deserialize(data) + assert restored.text_chunks == msg.text_chunks + assert restored.metadata.sender == msg.metadata.sender + assert restored.tags == msg.tags diff --git a/tests/test_embedding_consistency.py b/tests/test_embedding_consistency.py index 906c2b52..619c9210 100644 --- a/tests/test_embedding_consistency.py +++ b/tests/test_embedding_consistency.py @@ -1,39 +1,38 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -"""Test embedding consistency checks between database and settings.""" +"""Test embedding consistency checks between database indexes.""" import os +import sqlite3 import tempfile +import numpy as np import pytest from typeagent import create_conversation -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.storage.sqlite import SqliteStorageProvider +from typeagent.storage.sqlite.schema import serialize_embedding from typeagent.transcripts.transcript import TranscriptMessage, TranscriptMessageMeta @pytest.mark.asyncio -async def test_embedding_size_mismatch_in_message_index(): - """Test that opening a DB with mismatched embedding size raises an error.""" - # Create a temporary database file +async def test_same_embedding_size_no_error(): + """Test that opening a DB with the same model works fine.""" with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp: db_path = tmp.name try: - # Create a 
conversation with test model (embedding size 3) settings1 = ConversationSettings( - model=AsyncEmbeddingModel(embedding_size=3, model_name="test") + model=create_test_embedding_model(embedding_size=3) ) - # Disable LLM knowledge extraction to avoid API key requirement settings1.semantic_ref_index_settings.auto_extract_knowledge = False conv1 = await create_conversation( db_path, TranscriptMessage, settings=settings1 ) - # Add some messages to populate the index messages = [ TranscriptMessage( text_chunks=["Hello world"], @@ -43,108 +42,151 @@ async def test_embedding_size_mismatch_in_message_index(): await conv1.add_messages_with_indexing(messages) await conv1.storage_provider.close() - # Now try to open the same database with a different embedding size - # This should raise an error + # Reopen with same settings — should work settings2 = ConversationSettings( - model=AsyncEmbeddingModel(embedding_size=5, model_name="test") + model=create_test_embedding_model(embedding_size=3) ) - - with pytest.raises(ValueError, match="embedding_size"): - provider = SqliteStorageProvider( - db_path=db_path, - message_type=TranscriptMessage, - message_text_index_settings=settings2.message_text_index_settings, - related_term_index_settings=settings2.related_term_index_settings, - ) - await provider.close() + provider = SqliteStorageProvider( + db_path=db_path, + message_type=TranscriptMessage, + message_text_index_settings=settings2.message_text_index_settings, + related_term_index_settings=settings2.related_term_index_settings, + ) + await provider.close() finally: - # Clean up the temporary database if os.path.exists(db_path): os.unlink(db_path) @pytest.mark.asyncio -async def test_embedding_size_mismatch_in_related_terms(): - """Test that opening a DB with mismatched embedding size in related terms raises an error.""" - # Create a temporary database file +async def test_empty_db_no_error(): + """Test that opening an empty DB doesn't raise an error regardless of embedding size.""" 
with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp: db_path = tmp.name try: - # Create a conversation with default embedding size settings1 = ConversationSettings( - model=AsyncEmbeddingModel(embedding_size=3, model_name="test") + model=create_test_embedding_model(embedding_size=3) ) - # Disable LLM knowledge extraction to avoid API key requirement settings1.semantic_ref_index_settings.auto_extract_knowledge = False conv1 = await create_conversation( db_path, TranscriptMessage, settings=settings1 ) - - # Add some messages to populate the related terms index - messages = [ - TranscriptMessage( - text_chunks=["Apple is a fruit"], - metadata=TranscriptMessageMeta(speaker="Alice"), - ) - ] - await conv1.add_messages_with_indexing(messages) await conv1.storage_provider.close() - # Now try to open the same database with a different embedding size - # This should raise an error + # Open with different embedding size should work since DB is empty settings2 = ConversationSettings( - model=AsyncEmbeddingModel(embedding_size=5, model_name="test") + model=create_test_embedding_model(embedding_size=5) + ) + provider = SqliteStorageProvider( + db_path=db_path, + message_type=TranscriptMessage, + message_text_index_settings=settings2.message_text_index_settings, + related_term_index_settings=settings2.related_term_index_settings, ) + await provider.close() + + finally: + if os.path.exists(db_path): + os.unlink(db_path) + - with pytest.raises(ValueError, match="embedding_size"): - provider = SqliteStorageProvider( +@pytest.mark.asyncio +async def test_embedding_size_mismatch_raises(): + """Test that mismatched embedding sizes between indexes raises ValueError.""" + with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp: + db_path = tmp.name + + try: + # Create a conversation so that the schema is set up + settings = ConversationSettings( + model=create_test_embedding_model(embedding_size=3) + ) + 
settings.semantic_ref_index_settings.auto_extract_knowledge = False + conv = await create_conversation(db_path, TranscriptMessage, settings=settings) + await conv.storage_provider.close() + + # Manually insert embeddings of different sizes into the two tables + conn = sqlite3.connect(db_path) + msg_emb = serialize_embedding(np.array([0.1, 0.2, 0.3], dtype=np.float32)) + term_emb = serialize_embedding( + np.array([0.1, 0.2, 0.3, 0.4, 0.5], dtype=np.float32) + ) + conn.execute( + "INSERT INTO MessageTextIndex " + "(msg_id, chunk_ordinal, embedding, index_position) " + "VALUES (0, 0, ?, 0)", + (msg_emb,), + ) + conn.execute( + "INSERT INTO RelatedTermsFuzzy (term, term_embedding) VALUES (?, ?)", + ("hello", term_emb), + ) + conn.commit() + conn.close() + + # Reopening should detect the mismatch + settings2 = ConversationSettings( + model=create_test_embedding_model(embedding_size=3) + ) + with pytest.raises(ValueError, match="Embedding size mismatch"): + SqliteStorageProvider( db_path=db_path, message_type=TranscriptMessage, message_text_index_settings=settings2.message_text_index_settings, related_term_index_settings=settings2.related_term_index_settings, ) - await provider.close() finally: - # Clean up the temporary database if os.path.exists(db_path): os.unlink(db_path) @pytest.mark.asyncio -async def test_empty_db_no_error(): - """Test that opening an empty DB doesn't raise an error regardless of embedding size.""" - # Create a temporary database file +async def test_adding_mismatched_embeddings_raises(): + """Test that adding messages with a different embedding size raises ValueError.""" with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as tmp: db_path = tmp.name try: - # Create an empty database + # Create and populate with size-3 embeddings settings1 = ConversationSettings( - model=AsyncEmbeddingModel(embedding_size=3, model_name="test") + model=create_test_embedding_model(embedding_size=3) ) - # Disable LLM knowledge extraction to avoid API key 
requirement settings1.semantic_ref_index_settings.auto_extract_knowledge = False conv1 = await create_conversation( db_path, TranscriptMessage, settings=settings1 ) + await conv1.add_messages_with_indexing( + [ + TranscriptMessage( + text_chunks=["Hello world"], + metadata=TranscriptMessageMeta(speaker="Alice"), + ) + ] + ) await conv1.storage_provider.close() - # Open with different embedding size should work since DB is empty + # Reopen with size-5 embeddings and try to add more messages settings2 = ConversationSettings( - model=AsyncEmbeddingModel(embedding_size=5, model_name="test") + model=create_test_embedding_model(embedding_size=5) ) - provider = SqliteStorageProvider( - db_path=db_path, - message_type=TranscriptMessage, - message_text_index_settings=settings2.message_text_index_settings, - related_term_index_settings=settings2.related_term_index_settings, + settings2.semantic_ref_index_settings.auto_extract_knowledge = False + conv2 = await create_conversation( + db_path, TranscriptMessage, settings=settings2 ) - await provider.close() + with pytest.raises(ValueError, match="Embedding size mismatch"): + await conv2.add_messages_with_indexing( + [ + TranscriptMessage( + text_chunks=["Goodbye world"], + metadata=TranscriptMessageMeta(speaker="Bob"), + ) + ] + ) + await conv2.storage_provider.close() finally: - # Clean up the temporary database if os.path.exists(db_path): os.unlink(db_path) diff --git a/tests/test_embeddings.py b/tests/test_embeddings.py index 47ce7e8a..24a4ff69 100644 --- a/tests/test_embeddings.py +++ b/tests/test_embeddings.py @@ -3,57 +3,50 @@ import numpy as np import pytest -from pytest import MonkeyPatch from pytest_mock import MockerFixture -import openai - -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import CachingEmbeddingModel, IEmbeddingModel from conftest import ( embedding_model, # type: ignore # Magic, prevents side effects of mocking ) -from conftest import ( - 
FakeEmbeddings, -) @pytest.mark.asyncio -async def test_get_embedding_nocache(embedding_model: AsyncEmbeddingModel): +async def test_get_embedding_nocache(embedding_model: CachingEmbeddingModel): """Test retrieving an embedding without using the cache.""" input_text = "Hello, world" embedding = await embedding_model.get_embedding_nocache(input_text) assert isinstance(embedding, np.ndarray) - assert embedding.shape == (embedding_model.embedding_size,) assert embedding.dtype == np.float32 @pytest.mark.asyncio -async def test_get_embeddings_nocache(embedding_model: AsyncEmbeddingModel): +async def test_get_embeddings_nocache(embedding_model: CachingEmbeddingModel): """Test retrieving multiple embeddings without using the cache.""" inputs = ["Hello, world", "Foo bar baz"] embeddings = await embedding_model.get_embeddings_nocache(inputs) assert isinstance(embeddings, np.ndarray) - assert embeddings.shape == (len(inputs), embedding_model.embedding_size) + assert embeddings.shape[0] == len(inputs) assert embeddings.dtype == np.float32 @pytest.mark.asyncio async def test_get_embedding_with_cache( - embedding_model: AsyncEmbeddingModel, mocker: MockerFixture + embedding_model: CachingEmbeddingModel, mocker: MockerFixture ): """Test retrieving an embedding with caching.""" input_text = "Hello, world" # First call should populate the cache embedding1 = await embedding_model.get_embedding(input_text) - assert input_text in embedding_model._embedding_cache + assert input_text in embedding_model._cache - # Mock the nocache method to ensure it's not called + # Mock the nocache method on the underlying embedder to ensure it's not called mock_get_embedding_nocache = mocker.patch.object( - embedding_model, "get_embedding_nocache", autospec=True + embedding_model._embedder, "get_embedding_nocache", autospec=True ) # Second call should retrieve from the cache @@ -66,7 +59,7 @@ async def test_get_embedding_with_cache( @pytest.mark.asyncio async def test_get_embeddings_with_cache( - 
embedding_model: AsyncEmbeddingModel, mocker: MockerFixture + embedding_model: CachingEmbeddingModel, mocker: MockerFixture ): """Test retrieving multiple embeddings with caching.""" inputs = ["Hello, world", "Foo bar baz"] @@ -74,11 +67,11 @@ async def test_get_embeddings_with_cache( # First call should populate the cache embeddings1 = await embedding_model.get_embeddings(inputs) for input_text in inputs: - assert input_text in embedding_model._embedding_cache + assert input_text in embedding_model._cache - # Mock the nocache method to ensure it's not called + # Mock the nocache method on the underlying embedder to ensure it's not called mock_get_embeddings_nocache = mocker.patch.object( - embedding_model, "get_embeddings_nocache", autospec=True + embedding_model._embedder, "get_embeddings_nocache", autospec=True ) # Second call should retrieve from the cache @@ -90,226 +83,67 @@ async def test_get_embeddings_with_cache( @pytest.mark.asyncio -async def test_get_embeddings_empty_input(embedding_model: AsyncEmbeddingModel): - """Test retrieving embeddings for an empty input list.""" - inputs = [] - embeddings = await embedding_model.get_embeddings(inputs) - - assert isinstance(embeddings, np.ndarray) - assert embeddings.shape == (0, embedding_model.embedding_size) - assert embeddings.dtype == np.float32 +async def test_get_embeddings_empty_input(embedding_model: CachingEmbeddingModel): + """Test retrieving embeddings for an empty input list raises ValueError.""" + with pytest.raises(ValueError, match="Cannot embed an empty list"): + await embedding_model.get_embeddings([]) @pytest.mark.asyncio -async def test_add_embedding_to_cache(embedding_model: AsyncEmbeddingModel): +async def test_add_embedding_to_cache(embedding_model: CachingEmbeddingModel): """Test adding an embedding to the cache.""" key = "test_key" embedding = np.array([0.1, 0.2, 0.3], dtype=np.float32) embedding_model.add_embedding(key, embedding) - assert key in embedding_model._embedding_cache - assert 
np.array_equal(embedding_model._embedding_cache[key], embedding) + assert key in embedding_model._cache + assert np.array_equal(embedding_model._cache[key], embedding) @pytest.mark.asyncio -async def test_get_embedding_nocache_empty_input(embedding_model: AsyncEmbeddingModel): +async def test_get_embedding_nocache_empty_input( + embedding_model: CachingEmbeddingModel, +): """Test retrieving an embedding with no cache for an empty input.""" - with pytest.raises(openai.OpenAIError): + with pytest.raises(ValueError, match="Empty input text"): await embedding_model.get_embedding_nocache("") @pytest.mark.asyncio -async def test_refresh_auth( - embedding_model: AsyncEmbeddingModel, mocker: MockerFixture -): - """Test refreshing authentication when using Azure.""" - # Note that pyright doesn't understand mocking, hence the `# type: ignore` below - mocker.patch.object(embedding_model, "azure_token_provider", autospec=True) - mocker.patch.object(embedding_model, "_setup_azure", autospec=True) - - embedding_model.azure_token_provider.needs_refresh.return_value = True # type: ignore - embedding_model.azure_token_provider.refresh_token.return_value = "new_token" # type: ignore - embedding_model.azure_api_version = "2023-05-15" - embedding_model.azure_endpoint = "https://example.azure.com" - - await embedding_model.refresh_auth() +async def test_embeddings_are_normalized(embedding_model: CachingEmbeddingModel): + """Test that returned embeddings are unit-normalized.""" + inputs = ["Hello, world", "Foo bar baz", "Testing normalization"] + embeddings = await embedding_model.get_embeddings_nocache(inputs) - embedding_model.azure_token_provider.refresh_token.assert_called_once() # type: ignore - assert embedding_model.async_client is not None + for i in range(len(inputs)): + norm = float(np.linalg.norm(embeddings[i])) + assert abs(norm - 1.0) < 1e-6, f"Embedding {i} not normalized: norm={norm}" @pytest.mark.asyncio -async def test_set_endpoint(monkeypatch: MonkeyPatch): - """Test 
creating of model with custom endpoint.""" - - monkeypatch.setenv("AZURE_OPENAI_API_KEY", "does-not-matter") - - # Default - monkeypatch.setenv( - "AZURE_OPENAI_ENDPOINT_EMBEDDING", - "http://localhost:7997?api-version=2024-06-01", - ) - embedding_model = AsyncEmbeddingModel() - assert embedding_model.embedding_size == 1536 - assert embedding_model.model_name == "text-embedding-ada-002" - assert embedding_model.endpoint_envvar == "AZURE_OPENAI_ENDPOINT_EMBEDDING" - - # 3-large - monkeypatch.setenv( - "AZURE_OPENAI_ENDPOINT_EMBEDDING_3_LARGE", - "http://localhost:7997?api-version=2024-06-01", - ) - embedding_model = AsyncEmbeddingModel(model_name="text-embedding-3-large") - assert embedding_model.embedding_size == 3072 - assert embedding_model.model_name == "text-embedding-3-large" - assert embedding_model.endpoint_envvar == "AZURE_OPENAI_ENDPOINT_EMBEDDING_3_LARGE" - - # 3-small - monkeypatch.setenv( - "AZURE_OPENAI_ENDPOINT_EMBEDDING_3_SMALL", - "http://localhost:7998?api-version=2024-06-01", - ) - embedding_model = AsyncEmbeddingModel(model_name="text-embedding-3-small") - assert embedding_model.embedding_size == 1536 - assert embedding_model.model_name == "text-embedding-3-small" - assert embedding_model.endpoint_envvar == "AZURE_OPENAI_ENDPOINT_EMBEDDING_3_SMALL" - - # Fully custom - monkeypatch.setenv("OPENAI_API_KEY", "does-not-matter") - monkeypatch.setenv("INFINITY_EMBEDDING_URL", "http://localhost:7997") - embedding_model = AsyncEmbeddingModel( - 1024, "custom_model", "INFINITY_EMBEDDING_URL" - ) - assert embedding_model.embedding_size == 1024 - assert embedding_model.model_name == "custom_model" - # NOTE: checking openai.AsyncOpenAI internals - assert embedding_model.async_client is not None - assert embedding_model.async_client.base_url == "http://localhost:7997" - assert embedding_model.async_client.api_key == "does-not-matter" - - # Customized 3-small - embedding_model = AsyncEmbeddingModel( - 2000, "text-embedding-3-small", "ALTERNATE_ENDPOINT" - ) - 
assert embedding_model.embedding_size == 2000 - assert embedding_model.model_name == "text-embedding-3-small" - assert embedding_model.endpoint_envvar == "ALTERNATE_ENDPOINT" - - # Allow explicitly setting default embedding size - AsyncEmbeddingModel(1536) - - # Can't customize embedding_size for default model - with pytest.raises(ValueError): - AsyncEmbeddingModel(1024) - - # Not even when default model name specified explicitly - with pytest.raises(ValueError): - AsyncEmbeddingModel(1024, "text-embedding-ada-002") +async def test_embeddings_are_deterministic( + embedding_model: CachingEmbeddingModel, +): + """Test that the same input always produces the same embedding.""" + input_text = "Deterministic test" + e1 = await embedding_model.get_embedding_nocache(input_text) + e2 = await embedding_model.get_embedding_nocache(input_text) + assert np.array_equal(e1, e2) @pytest.mark.asyncio -async def test_embeddings_batching_tiktoken( - fake_embeddings_tiktoken: FakeEmbeddings, monkeypatch: MonkeyPatch +async def test_different_inputs_produce_different_embeddings( + embedding_model: CachingEmbeddingModel, ): - monkeypatch.setenv("OPENAI_API_KEY", "test_key") - - embedding_model = AsyncEmbeddingModel() - assert embedding_model.max_chunk_size == 4096 - - embedding_model.async_client.embeddings = fake_embeddings_tiktoken # type: ignore - - # Check max batch size - inputs = ["a"] * 2049 - embeddings = await embedding_model.get_embeddings(inputs) - assert len(embeddings) == 2049 - assert fake_embeddings_tiktoken.call_count == 2 - - # Check max token size - inputs = ["Very long input longer than 4096 tokens will be truncated" * 500] - embeddings = await embedding_model.get_embeddings(inputs) - assert len(embeddings) == 1 - - fake_embeddings_tiktoken.reset_counter() - - TEST_MAX_TOKEN_SIZE = 10 - TEST_MAX_TOKENS_PER_BATCH = 20 - embedding_model.max_chunk_size = TEST_MAX_TOKEN_SIZE - embedding_model.max_size_per_batch = TEST_MAX_TOKENS_PER_BATCH - 
fake_embeddings_tiktoken.max_elements_per_batch = TEST_MAX_TOKENS_PER_BATCH - - assert embedding_model.encoding is not None - - token = [500] * 20 # --> 20 tokens - input = [embedding_model.encoding.decode(token)] * 4 - embeddings = await embedding_model.get_embeddings_nocache(input) # type: ignore - - # each input gets truncated to 10 tokens, so 4 inputs fit in 2 batches of 20 tokens - assert fake_embeddings_tiktoken.call_count == 2 - assert len(embeddings) == 4 - - fake_embeddings_tiktoken.reset_counter() - - TEST_MAX_TOKEN_SIZE = 7 - embedding_model.max_chunk_size = TEST_MAX_TOKEN_SIZE - - token = [500] * 20 # --> 20 tokens - input = [embedding_model.encoding.decode(token)] * 5 - embeddings = await embedding_model.get_embeddings_nocache(input) # type: ignore - - # each input gets truncated to 7 tokens, so each batch can hold 2 inputs (14 tokens) - # 5 inputs require 3 batches - assert fake_embeddings_tiktoken.call_count == 3 - assert len(embeddings) == 5 + """Test that different inputs produce different embeddings.""" + e1 = await embedding_model.get_embedding_nocache("Hello") + e2 = await embedding_model.get_embedding_nocache("World") + assert not np.array_equal(e1, e2) @pytest.mark.asyncio -async def test_embeddings_batching( - fake_embeddings: FakeEmbeddings, monkeypatch: MonkeyPatch +async def test_implements_iembedding_model( + embedding_model: CachingEmbeddingModel, ): - monkeypatch.setenv("OPENAI_API_KEY", "test_key") - - embedding_model = AsyncEmbeddingModel(1024, "custom_model") - embedding_model.async_client.embeddings = fake_embeddings # type: ignore - - # Check max batch size - inputs = ["a"] * 2049 - embeddings = await embedding_model.get_embeddings(inputs) - assert len(embeddings) == 2049 - assert fake_embeddings.call_count == 2 - - TEST_MAX_CHAR_SIZE = 10 - TEST_MAX_CHARS_PER_BATCH = 20 - embedding_model.max_chunk_size = TEST_MAX_CHAR_SIZE - embedding_model.max_size_per_batch = TEST_MAX_CHARS_PER_BATCH - fake_embeddings.max_elements_per_batch = 
TEST_MAX_CHARS_PER_BATCH - - # Check max token size - inputs = ["a" * TEST_MAX_CHAR_SIZE] - embeddings = await embedding_model.get_embeddings_nocache(inputs) - assert len(embeddings) == 1 - assert np.all(embeddings[0] == 0) - - fake_embeddings.reset_counter() - - # Check one over max token size - inputs = ["a" * (TEST_MAX_CHAR_SIZE + 1)] - embeddings = await embedding_model.get_embeddings_nocache(inputs) - assert len(embeddings) == 1 - assert fake_embeddings.call_count == 1 - - fake_embeddings.reset_counter() - - # Check input as large as max_size_per_batch - inputs = ["a" * 10, "a" * 5, "a" * 5] - embeddings = await embedding_model.get_embeddings_nocache(inputs) # type: ignore - assert fake_embeddings.call_count == 1 - assert len(embeddings) == 3 - - fake_embeddings.reset_counter() - - # Check input larger than max_size_per_batch - # max chars per batch is 20, so 10*10 chars requires 5 batches - inputs = ["a" * 10] * 10 - embeddings = await embedding_model.get_embeddings_nocache(inputs) # type: ignore - assert fake_embeddings.call_count == 5 - assert len(embeddings) == 10 + """Test that CachingEmbeddingModel satisfies the IEmbeddingModel protocol.""" + assert isinstance(embedding_model, IEmbeddingModel) diff --git a/tests/test_factory.py b/tests/test_factory.py index 0f62220f..44c45e5f 100644 --- a/tests/test_factory.py +++ b/tests/test_factory.py @@ -6,7 +6,7 @@ import pytest from typeagent import create_conversation -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.transcripts.transcript import TranscriptMessage, TranscriptMessageMeta @@ -15,7 +15,7 @@ async def test_create_conversation_minimal(): """Test creating a conversation with minimal parameters.""" # Create empty conversation with test model - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = 
create_test_embedding_model() settings = ConversationSettings(model=test_model) conversation = await create_conversation( None, @@ -36,7 +36,7 @@ async def test_create_conversation_minimal(): @pytest.mark.asyncio async def test_create_conversation_with_tags(): """Test creating a conversation with tags.""" - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) conversation = await create_conversation( None, @@ -54,7 +54,7 @@ async def test_create_conversation_with_tags(): async def test_create_conversation_and_add_messages(really_needs_auth): """Test the complete workflow: create conversation and add messages.""" # 1. Create empty conversation - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) conversation = await create_conversation( None, diff --git a/tests/test_incremental_index.py b/tests/test_incremental_index.py index 12f706a6..ced12f19 100644 --- a/tests/test_incremental_index.py +++ b/tests/test_incremental_index.py @@ -8,7 +8,7 @@ import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.storage.sqlite.provider import SqliteStorageProvider from typeagent.transcripts.transcript import ( @@ -30,7 +30,7 @@ async def test_incremental_index_building(): db_path = os.path.join(tmpdir, "test.db") # Create settings with test model (no API keys needed) - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) settings.semantic_ref_index_settings.auto_extract_knowledge = False @@ -74,7 +74,7 @@ async def test_incremental_index_building(): # Second ingestion - add more messages 
and rebuild index print("\n=== Second ingestion ===") - test_model2 = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model2 = create_test_embedding_model() settings2 = ConversationSettings(model=test_model2) settings2.semantic_ref_index_settings.auto_extract_knowledge = False storage2 = SqliteStorageProvider( @@ -136,7 +136,7 @@ async def test_incremental_index_with_vtt_files(): db_path = os.path.join(tmpdir, "test.db") # Create settings with test model (no API keys needed) - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) settings.semantic_ref_index_settings.auto_extract_knowledge = False @@ -161,9 +161,7 @@ async def test_incremental_index_with_vtt_files(): # Second VTT file ingestion into same database print("\n=== Import second VTT file ===") - settings2 = ConversationSettings( - model=AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) - ) + settings2 = ConversationSettings(model=create_test_embedding_model()) settings2.semantic_ref_index_settings.auto_extract_knowledge = False # Ingest the second transcript diff --git a/tests/test_interfaces.py b/tests/test_interfaces.py index d4f6b9fc..160bbdc7 100644 --- a/tests/test_interfaces.py +++ b/tests/test_interfaces.py @@ -18,7 +18,7 @@ Thread, WhenFilter, ) -from typeagent.knowpro.kplib import ConcreteEntity +from typeagent.knowpro.knowledge_schema import ConcreteEntity def test_text_location_serialization(): diff --git a/tests/test_knowledge.py b/tests/test_knowledge.py index e20ff1f2..0374dfec 100644 --- a/tests/test_knowledge.py +++ b/tests/test_knowledge.py @@ -5,7 +5,8 @@ from typechat import Failure, Result, Success -from typeagent.knowpro import convknowledge, kplib +from typeagent.knowpro import convknowledge +from typeagent.knowpro import knowledge_schema as kplib from typeagent.knowpro.knowledge import ( create_knowledge_extractor, extract_knowledge_from_text, @@ -13,7 +14,7 @@ 
merge_concrete_entities, merge_topics, ) -from typeagent.knowpro.kplib import ConcreteEntity, Facet +from typeagent.knowpro.knowledge_schema import ConcreteEntity, Facet class MockKnowledgeExtractor: @@ -44,12 +45,12 @@ async def test_extract_knowledge_from_text( mock_knowledge_extractor: convknowledge.KnowledgeExtractor, ): """Test extracting knowledge from a single text input.""" - result = await extract_knowledge_from_text(mock_knowledge_extractor, "test text", 3) + result = await extract_knowledge_from_text(mock_knowledge_extractor, "test text") assert isinstance(result, Success) assert result.value.topics[0] == "test text" failure_result = await extract_knowledge_from_text( - mock_knowledge_extractor, "error", 3 + mock_knowledge_extractor, "error" ) assert isinstance(failure_result, Failure) assert failure_result.message == "Extraction failed" @@ -62,7 +63,7 @@ async def test_extract_knowledge_from_text_batch( """Test extracting knowledge from a batch of text inputs.""" text_batch = ["text 1", "text 2", "error"] results = await extract_knowledge_from_text_batch( - mock_knowledge_extractor, text_batch, 2, 3 + mock_knowledge_extractor, text_batch, 2 ) assert len(results) == 3 diff --git a/tests/test_kplib.py b/tests/test_kplib.py index 0a5edf61..7b5247de 100644 --- a/tests/test_kplib.py +++ b/tests/test_kplib.py @@ -1,7 +1,7 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -from typeagent.knowpro.kplib import ( +from typeagent.knowpro.knowledge_schema import ( Action, ActionParam, ConcreteEntity, diff --git a/tests/test_mbox.py b/tests/test_mbox.py new file mode 100644 index 00000000..1d33f900 --- /dev/null +++ b/tests/test_mbox.py @@ -0,0 +1,210 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +"""Tests for email filtering logic and email parsing edge cases.""" + +from datetime import datetime, timezone + +from typeagent.emails.email_import import email_matches_date_filter, import_email_string + +# =========================================================================== +# Tests for email_matches_date_filter +# =========================================================================== + + +class TestEmailMatchesDateFilter: + """Tests for the email_matches_date_filter helper in ingest_email.py.""" + + def _utc(self, year: int, month: int, day: int) -> datetime: + return datetime(year, month, day, tzinfo=timezone.utc) + + def test_no_filters(self) -> None: + """All emails pass when no filters are set.""" + assert email_matches_date_filter("2024-01-15T10:00:00+00:00", None, None) + + def test_none_timestamp_always_passes(self) -> None: + """Emails without a timestamp are always included.""" + assert email_matches_date_filter( + None, self._utc(2024, 1, 1), self._utc(2024, 12, 31) + ) + + def test_invalid_timestamp_always_passes(self) -> None: + """Emails with unparseable timestamps are always included.""" + assert email_matches_date_filter( + "not-a-date", self._utc(2024, 1, 1), self._utc(2024, 12, 31) + ) + + def test_start_date_filter_includes(self) -> None: + """Email on or after the start_date passes.""" + start = self._utc(2024, 1, 15) + assert email_matches_date_filter("2024-01-15T00:00:00+00:00", start, None) + assert email_matches_date_filter("2024-01-16T00:00:00+00:00", start, None) + + def test_start_date_filter_excludes(self) -> None: + """Email before the start_date is excluded.""" + start = self._utc(2024, 1, 15) + assert not email_matches_date_filter("2024-01-14T23:59:59+00:00", start, None) + + def test_stop_date_filter_includes(self) -> None: + """Email before the stop_date passes.""" + stop = self._utc(2024, 2, 1) + assert email_matches_date_filter("2024-01-31T23:59:59+00:00", None, stop) + + def test_stop_date_filter_excludes(self) 
-> None: + """Email on or after the stop_date is excluded (exclusive upper bound).""" + stop = self._utc(2024, 2, 1) + assert not email_matches_date_filter("2024-02-01T00:00:00+00:00", None, stop) + + def test_date_range(self) -> None: + """Email within [start_date, stop_date) passes; outside fails.""" + start = self._utc(2024, 1, 1) + stop = self._utc(2024, 2, 1) + # Inside + assert email_matches_date_filter("2024-01-15T12:00:00+00:00", start, stop) + # Before range + assert not email_matches_date_filter("2023-12-31T23:59:59+00:00", start, stop) + # At upper bound (exclusive) + assert not email_matches_date_filter("2024-02-01T00:00:00+00:00", start, stop) + + def test_naive_timestamp_treated_as_local(self) -> None: + """Offset-naive timestamps should be treated as local time.""" + # Use the same naive→aware conversion the function applies internally + # so the boundary's UTC offset matches the test dates regardless of DST. + start = datetime(2024, 1, 15).astimezone() + assert email_matches_date_filter("2024-01-15T00:00:00", start, None) + assert not email_matches_date_filter("2024-01-14T23:59:59", start, None) + + def test_different_timezone(self) -> None: + """Timestamps with non-UTC offsets are compared correctly.""" + # 2024-01-15T00:00:00+05:00 is 2024-01-14T19:00:00 UTC + start = self._utc(2024, 1, 15) + assert not email_matches_date_filter("2024-01-15T00:00:00+05:00", start, None) + # 2024-01-15T10:00:00+05:00 is 2024-01-15T05:00:00 UTC + assert email_matches_date_filter("2024-01-15T10:00:00+05:00", start, None) + + +# =========================================================================== +# Tests for email encoding edge cases +# =========================================================================== + + +_EMAIL_WITH_ENCODED_HEADER = """\ +From: =?utf-8?b?SsO8cmdlbg==?= +To: recipient@example.com +Subject: =?utf-8?q?M=C3=BCnchen_weather?= +Date: Mon, 01 Jan 2024 10:00:00 +0000 +Message-ID: + +Hello from Munich! 
+""" + + +class TestEncodingEdgeCases: + def test_encoded_header_sender(self) -> None: + """RFC 2047 encoded sender should be decoded to a string, not raise.""" + email = import_email_string(_EMAIL_WITH_ENCODED_HEADER) + assert isinstance(email.metadata.sender, str) + + def test_encoded_header_subject(self) -> None: + """RFC 2047 encoded subject should be decoded to a string.""" + email = import_email_string(_EMAIL_WITH_ENCODED_HEADER) + assert isinstance(email.metadata.subject, str) + + +_EMAIL_WITH_UNKNOWN_CHARSET = """\ +From: test@example.com +To: recipient@example.com +Subject: Unknown charset test +Date: Mon, 01 Jan 2024 10:00:00 +0000 +Message-ID: +MIME-Version: 1.0 +Content-Type: text/plain; charset="iso-8859-8-i" +Content-Transfer-Encoding: base64 + +SGVsbG8gV29ybGQ= +""" + + +class TestUnknownCharset: + def test_unknown_charset_does_not_crash(self) -> None: + """An email with an unknown charset should be decoded without raising.""" + email = import_email_string(_EMAIL_WITH_UNKNOWN_CHARSET) + body = " ".join(email.text_chunks) + assert "Hello World" in body or len(body) > 0 + + +# =========================================================================== +# Tests for mbox with missing / malformed date +# =========================================================================== + +_EMAIL_NO_DATE = """\ +From: test@example.com +To: recipient@example.com +Subject: No date header +Message-ID: + +This email has no Date header. 
+""" + + +class TestMissingDate: + def test_email_without_date_has_none_timestamp(self) -> None: + email = import_email_string(_EMAIL_NO_DATE) + assert email.timestamp is None + + def test_email_without_date_passes_date_filter(self) -> None: + """Emails without timestamps should always pass the date filter.""" + assert email_matches_date_filter( + None, datetime(2024, 1, 1, tzinfo=timezone.utc), None + ) + + +# =========================================================================== +# Tests for import_email_string and import_email_message edge cases +# =========================================================================== + +_SIMPLE_EMAIL = """\ +From: alice@example.com +To: bob@example.com +Subject: Test +Date: Mon, 01 Jan 2024 10:00:00 +0000 +Message-ID: + +Hello Bob! +""" + +_MULTIPART_EMAIL = """\ +From: alice@example.com +To: bob@example.com +Subject: Multipart +Date: Mon, 01 Jan 2024 10:00:00 +0000 +MIME-Version: 1.0 +Content-Type: multipart/alternative; boundary="boundary" + +--boundary +Content-Type: text/plain + +Plain text body +--boundary +Content-Type: text/html + +

HTML body

+--boundary-- +""" + + +class TestImportEmailString: + def test_simple_email(self) -> None: + email = import_email_string(_SIMPLE_EMAIL) + assert "alice@example.com" in email.metadata.sender + assert email.metadata.subject is not None + assert "Test" in email.metadata.subject + assert email.metadata.id == "" + assert email.timestamp is not None + assert len(email.text_chunks) > 0 + + def test_multipart_email(self) -> None: + email = import_email_string(_MULTIPART_EMAIL) + # Should extract the plain text part + body = " ".join(email.text_chunks) + assert "Plain text body" in body diff --git a/tests/test_mcp_server.py b/tests/test_mcp_server.py index 03fd0e69..7e3bd3b7 100644 --- a/tests/test_mcp_server.py +++ b/tests/test_mcp_server.py @@ -1,18 +1,39 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -"""End-to-end tests for the MCP server.""" +"""End-to-end and unit tests for the MCP server.""" +import json import os import sys -from typing import Any +from typing import Any, cast +from unittest.mock import AsyncMock, MagicMock import pytest -from mcp import StdioServerParameters +from mcp import ClientSession, StdioServerParameters from mcp.client.session import ClientSession as ClientSessionType +from mcp.client.stdio import stdio_client from mcp.shared.context import RequestContext -from mcp.types import CreateMessageRequestParams, CreateMessageResult, TextContent +from mcp.types import ( + CreateMessageRequestParams, + CreateMessageResult, + SamplingMessage, + TextContent, +) +import typechat + +from typeagent.aitools.model_adapters import create_chat_model +from typeagent.knowpro import answers, searchlang +from typeagent.knowpro.answer_response_schema import AnswerResponse +from typeagent.knowpro.convsettings import ConversationSettings +import typeagent.mcp.server as typeagent_mcp_server +from typeagent.mcp.server import ( + load_podcast_database_or_index, + MCPTypeChatModel, + ProcessingContext, + QuestionResponse, +) from conftest 
import EPISODE_53_INDEX @@ -38,50 +59,32 @@ async def sampling_callback( params: CreateMessageRequestParams, ) -> CreateMessageResult: """Sampling callback that uses OpenAI to generate responses.""" - # Use OpenAI to generate a response - from openai.types.chat import ChatCompletionMessageParam + model = create_chat_model() - from typeagent.aitools.utils import create_async_openai_client - - client = create_async_openai_client() - - # Convert MCP SamplingMessage to OpenAI format - messages: list[ChatCompletionMessageParam] = [] + # Convert MCP SamplingMessage to TypeChat PromptSection list + sections: list[typechat.PromptSection] = [] + if params.systemPrompt: + sections.append({"role": "system", "content": params.systemPrompt}) for msg in params.messages: - # Handle TextContent - content: str if isinstance(msg.content, TextContent): content = msg.content.text else: raise ValueError( f"Unsupported content type in sampling message: {type(msg.content)}" ) + role = "user" if msg.role == "user" else "assistant" + sections.append({"role": role, "content": content}) - # MCP roles are "user" or "assistant", which are compatible with OpenAI - if msg.role == "user": - messages.append({"role": "user", "content": content}) - else: - messages.append({"role": "assistant", "content": content}) - - # Add system prompt if provided - if params.systemPrompt: - messages.insert(0, {"role": "system", "content": params.systemPrompt}) - - # Call OpenAI - response = await client.chat.completions.create( - model="gpt-4o", - messages=messages, - max_tokens=params.maxTokens, - temperature=params.temperature if params.temperature is not None else 1.0, - ) + result = await model.complete(sections) + if isinstance(result, typechat.Success): + text = result.value + else: + text = result.message - # Convert response to MCP format return CreateMessageResult( role="assistant", - content=TextContent( - type="text", text=response.choices[0].message.content or "" - ), - model=response.model, + 
content=TextContent(type="text", text=text), + model="gpt-4o", stopReason="endTurn", ) @@ -91,9 +94,6 @@ async def test_mcp_server_query_conversation_slow( really_needs_auth, server_params: StdioServerParameters ): """Test the query_conversation tool end-to-end using MCP client.""" - from mcp import ClientSession - from mcp.client.stdio import stdio_client - # Pass through environment variables needed for authentication # otherwise this test will fail in the CI on Windows only if not (server_params.env) is None: @@ -135,8 +135,6 @@ async def test_mcp_server_query_conversation_slow( response_text = content_item.text # Parse response (it should be JSON with success, answer, time_used) - import json - try: response_data = json.loads(response_text) except json.JSONDecodeError as e: @@ -158,9 +156,6 @@ async def test_mcp_server_query_conversation_slow( @pytest.mark.asyncio async def test_mcp_server_empty_question(server_params: StdioServerParameters): """Test the query_conversation tool with an empty question.""" - from mcp import ClientSession - from mcp.client.stdio import stdio_client - # Create client session and connect to server async with stdio_client(server_params) as (read, write): async with ClientSession( @@ -183,8 +178,262 @@ async def test_mcp_server_empty_question(server_params: StdioServerParameters): assert isinstance(content_item, TextContent) response_text = content_item.text - import json - response_data = json.loads(response_text) assert response_data["success"] is False assert "No question provided" in response_data["answer"] + + +# --------------------------------------------------------------------------- +# Unit tests (formerly in test_mcp_server_unit.py) +# --------------------------------------------------------------------------- + +# Coverage import guard — tested implicitly (the module loads at all +# without `coverage` installed). We just verify the guard didn't break the +# import. 
+ + +def test_server_module_imports() -> None: + """Importing the server module should not raise even without coverage.""" + assert hasattr(typeagent_mcp_server, "mcp") # The FastMCP instance exists + + +# --------------------------------------------------------------------------- +# PromptSection role mapping ("system" → "assistant") +# --------------------------------------------------------------------------- + + +class TestMCPTypeChatModelRoleMapping: + """Verify that PromptSection roles are mapped correctly to MCP roles.""" + + @staticmethod + def _make_model() -> tuple[MCPTypeChatModel, AsyncMock]: + session = AsyncMock() + # create_message returns a result with TextContent + session.create_message.return_value = AsyncMock( + content=TextContent(type="text", text="response") + ) + model = MCPTypeChatModel(session) + return model, session + + @pytest.mark.asyncio + async def test_string_prompt_becomes_user_message(self) -> None: + model, session = self._make_model() + await model.complete("hello") + + call_args = session.create_message.call_args + messages: list[SamplingMessage] = call_args.kwargs["messages"] + assert len(messages) == 1 + assert messages[0].role == "user" + assert isinstance(messages[0].content, TextContent) + assert messages[0].content.text == "hello" + + @pytest.mark.asyncio + async def test_user_role_preserved(self) -> None: + model, session = self._make_model() + sections: list[typechat.PromptSection] = [ + {"role": "user", "content": "question"}, + ] + await model.complete(sections) + + messages: list[SamplingMessage] = session.create_message.call_args.kwargs[ + "messages" + ] + assert messages[0].role == "user" + + @pytest.mark.asyncio + async def test_assistant_role_preserved(self) -> None: + model, session = self._make_model() + sections: list[typechat.PromptSection] = [ + {"role": "assistant", "content": "context"}, + ] + await model.complete(sections) + + messages: list[SamplingMessage] = session.create_message.call_args.kwargs[ + 
"messages" + ] + assert messages[0].role == "assistant" + + @pytest.mark.asyncio + async def test_system_role_mapped_to_assistant(self) -> None: + """System role doesn't exist in MCP SamplingMessage; it must be mapped.""" + model, session = self._make_model() + sections: list[typechat.PromptSection] = [ + {"role": "system", "content": "instructions"}, + {"role": "user", "content": "question"}, + ] + await model.complete(sections) + + messages: list[SamplingMessage] = session.create_message.call_args.kwargs[ + "messages" + ] + assert messages[0].role == "assistant" # "system" → "assistant" + assert messages[1].role == "user" + + @pytest.mark.asyncio + async def test_mixed_roles_order(self) -> None: + model, session = self._make_model() + sections: list[typechat.PromptSection] = [ + {"role": "system", "content": "sys"}, + {"role": "user", "content": "usr"}, + {"role": "assistant", "content": "asst"}, + ] + await model.complete(sections) + + messages: list[SamplingMessage] = session.create_message.call_args.kwargs[ + "messages" + ] + assert [m.role for m in messages] == ["assistant", "user", "assistant"] + + @pytest.mark.asyncio + async def test_exception_returns_failure(self) -> None: + model, session = self._make_model() + session.create_message.side_effect = RuntimeError("boom") + result = await model.complete("test") + assert isinstance(result, typechat.Failure) + assert "boom" in result.message + + @pytest.mark.asyncio + async def test_text_content_returns_success(self) -> None: + model, _ = self._make_model() + result = await model.complete("test") + assert isinstance(result, typechat.Success) + assert result.value == "response" + + @pytest.mark.asyncio + async def test_list_content_returns_joined(self) -> None: + model, session = self._make_model() + session.create_message.return_value = AsyncMock( + content=[ + TextContent(type="text", text="part1"), + TextContent(type="text", text="part2"), + ] + ) + result = await model.complete("test") + assert 
isinstance(result, typechat.Success) + assert result.value == "part1\npart2" + + +# --------------------------------------------------------------------------- +# match statement default case in query_conversation +# --------------------------------------------------------------------------- + + +class TestQuestionResponseMatchDefault: + """The match on combined_answer.type must handle unexpected types.""" + + def test_known_types(self) -> None: + """QuestionResponse can represent success and failure.""" + ok = QuestionResponse(success=True, answer="yes", time_used=42) + assert ok.success is True + fail = QuestionResponse(success=False, answer="no", time_used=0) + assert fail.success is False + + def test_answer_type_coverage(self) -> None: + """AnswerResponse.type should only be 'Answered' or 'NoAnswer'.""" + answered = AnswerResponse(type="Answered", answer="yes") + assert answered.type == "Answered" + no_answer = AnswerResponse(type="NoAnswer", why_no_answer="dunno") + assert no_answer.type == "NoAnswer" + + +@pytest.mark.asyncio +async def test_sampling_callback_delegates_to_chat_model( + monkeypatch: pytest.MonkeyPatch, +) -> None: + """sampling_callback should delegate to create_chat_model().complete().""" + fake_model = AsyncMock() + fake_model.complete.return_value = typechat.Success("response") + + monkeypatch.setattr( + sys.modules[__name__], + "create_chat_model", + lambda: fake_model, + ) + + params = CreateMessageRequestParams( + messages=[ + SamplingMessage( + role="user", + content=TextContent(type="text", text="hello"), + ) + ], + maxTokens=32, + ) + + result = await sampling_callback( + cast(RequestContext[ClientSessionType, Any, Any], None), + params, + ) + + fake_model.complete.assert_awaited_once() + call_args = fake_model.complete.call_args[0][0] + assert call_args == [{"role": "user", "content": "hello"}] + assert isinstance(result.content, TextContent) + assert result.content.text == "response" + + +# 
--------------------------------------------------------------------------- +# MCPTypeChatModel — additional response format coverage +# --------------------------------------------------------------------------- + + +class TestMCPTypeChatModelResponseFormats: + @staticmethod + def _make_model_with_result(content: Any) -> MCPTypeChatModel: + session = AsyncMock() + session.create_message.return_value = AsyncMock(content=content) + return MCPTypeChatModel(session) + + @pytest.mark.asyncio + async def test_list_content_no_text_items_returns_failure(self) -> None: + """A list response with no TextContent items should return Failure.""" + # Use a non-TextContent item type (ImageContent would work but we mock with a dict) + model = self._make_model_with_result([]) + result = await model.complete("test") + assert isinstance(result, typechat.Failure) + assert "No text content" in result.message + + @pytest.mark.asyncio + async def test_unknown_content_type_returns_failure(self) -> None: + """A response with an unrecognized content type should return Failure.""" + # Simulate some unknown object that is neither TextContent nor list + model = self._make_model_with_result(42) + result = await model.complete("test") + assert isinstance(result, typechat.Failure) + assert "No text content" in result.message + + +# --------------------------------------------------------------------------- +# ProcessingContext.__repr__ +# --------------------------------------------------------------------------- + + +class TestProcessingContextRepr: + def test_repr_contains_options(self) -> None: + lang_opts = searchlang.LanguageSearchOptions(max_message_matches=10) + ctx_opts = answers.AnswerContextOptions(entities_top_k=5) + + proc = ProcessingContext( + lang_search_options=lang_opts, + answer_context_options=ctx_opts, + query_context=MagicMock(), + embedding_model=MagicMock(), + query_translator=MagicMock(), + answer_translator=MagicMock(), + ) + r = repr(proc) + assert 
r.startswith("Context(") + assert "LanguageSearchOptions" in r + + +# --------------------------------------------------------------------------- +# load_podcast_database_or_index — ValueError path +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_load_podcast_no_args_raises() -> None: + """Passing neither dbname nor podcast_index must raise ValueError.""" + settings = ConversationSettings() + with pytest.raises(ValueError, match="Either --database or --podcast-index"): + await load_podcast_database_or_index(settings, dbname=None, podcast_index=None) diff --git a/tests/test_memory_semrefindex.py b/tests/test_memory_semrefindex.py new file mode 100644 index 00000000..723cdd41 --- /dev/null +++ b/tests/test_memory_semrefindex.py @@ -0,0 +1,201 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Tests for storage/memory/semrefindex.py helper functions.""" + +import pytest + +from typeagent.knowpro import knowledge_schema as kplib +from typeagent.knowpro.interfaces import Topic +from typeagent.storage.memory import MemorySemanticRefCollection +from typeagent.storage.memory.semrefindex import ( + add_action, + add_entity, + add_facet, + add_term_to_index, + add_topic, +) + +from conftest import FakeTermIndex + + +def make_semrefs() -> MemorySemanticRefCollection: + return MemorySemanticRefCollection([]) + + +def make_index() -> FakeTermIndex: + return FakeTermIndex() + + +# --------------------------------------------------------------------------- +# add_term_to_index +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_add_term_to_index_basic() -> None: + index = make_index() + terms_added: set[str] = set() + await add_term_to_index(index, "hello", 0, terms_added) + assert "hello" in terms_added + assert await index.size() == 1 + + +@pytest.mark.asyncio +async def 
test_add_term_to_index_no_terms_added_set() -> None: + index = make_index() + await add_term_to_index(index, "world", 1) + assert await index.size() == 1 + + +@pytest.mark.asyncio +async def test_add_term_empty_string_is_stored() -> None: + """The function does not filter empty terms — delegated to the index.""" + index = make_index() + await add_term_to_index(index, "", 0) + # FakeTermIndex stores empty strings too + assert await index.size() == 1 + + +# --------------------------------------------------------------------------- +# add_facet +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_add_facet_none_does_nothing() -> None: + index = make_index() + await add_facet(None, 0, index) + assert await index.size() == 0 + + +@pytest.mark.asyncio +async def test_add_facet_string_value() -> None: + index = make_index() + facet = kplib.Facet(name="colour", value="red") + await add_facet(facet, 0, index) + terms = await index.get_terms() + assert "colour" in terms + assert "red" in terms + + +@pytest.mark.asyncio +async def test_add_facet_numeric_value() -> None: + index = make_index() + facet = kplib.Facet(name="count", value=42.0) + await add_facet(facet, 0, index) + terms = await index.get_terms() + assert "count" in terms + assert "42.0" in terms + + +# --------------------------------------------------------------------------- +# add_entity +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_add_entity_registers_name_and_types() -> None: + semrefs = make_semrefs() + index = make_index() + entity = kplib.ConcreteEntity(name="Alice", type=["person", "employee"]) + terms_added: set[str] = set() + await add_entity( + entity, + semrefs, + index, + message_ordinal=0, + chunk_ordinal=0, + terms_added=terms_added, + ) + assert "Alice" in terms_added + assert "person" in terms_added + assert "employee" in terms_added + assert await 
semrefs.size() == 1 + + +@pytest.mark.asyncio +async def test_add_entity_with_facets() -> None: + semrefs = make_semrefs() + index = make_index() + entity = kplib.ConcreteEntity( + name="Book", + type=["item"], + facets=[kplib.Facet(name="genre", value="fiction")], + ) + await add_entity(entity, semrefs, index, message_ordinal=1, chunk_ordinal=0) + terms = await index.get_terms() + assert "genre" in terms + assert "fiction" in terms + + +# --------------------------------------------------------------------------- +# add_topic +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_add_topic_registers_text() -> None: + semrefs = make_semrefs() + index = make_index() + topic = Topic(text="machine learning") + terms_added: set[str] = set() + await add_topic( + topic, + semrefs, + index, + message_ordinal=2, + chunk_ordinal=0, + terms_added=terms_added, + ) + assert "machine learning" in terms_added + assert await semrefs.size() == 1 + + +# --------------------------------------------------------------------------- +# add_action +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_add_action_registers_verbs() -> None: + semrefs = make_semrefs() + index = make_index() + action = kplib.Action( + verbs=["run", "execute"], + verb_tense="present", + subject_entity_name="Alice", + object_entity_name="script", + indirect_object_entity_name="none", + ) + terms_added: set[str] = set() + await add_action( + action, + semrefs, + index, + message_ordinal=0, + chunk_ordinal=0, + terms_added=terms_added, + ) + terms = set(await index.get_terms()) + assert "run execute" in terms + assert "Alice" in terms + assert "script" in terms + assert await semrefs.size() == 1 + + +@pytest.mark.asyncio +async def test_add_action_none_entities_skipped() -> None: + semrefs = make_semrefs() + index = make_index() + action = kplib.Action( + verbs=["go"], + 
verb_tense="present", + subject_entity_name="none", + object_entity_name="none", + indirect_object_entity_name="none", + ) + await add_action(action, semrefs, index, message_ordinal=0, chunk_ordinal=0) + terms = await index.get_terms() + assert "none" not in terms + assert "go" in terms diff --git a/tests/test_message_text_index_population.py b/tests/test_message_text_index_population.py index 457ef7e3..13d53c00 100644 --- a/tests/test_message_text_index_population.py +++ b/tests/test_message_text_index_population.py @@ -7,10 +7,10 @@ import os import tempfile +from dotenv import load_dotenv import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME -from typeagent.aitools.utils import load_dotenv +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, @@ -30,7 +30,7 @@ async def test_message_text_index_population_from_database(): try: # Use the test model that's already configured in the system - embedding_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + embedding_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(embedding_model) message_text_settings = MessageTextIndexSettings(embedding_settings) related_terms_settings = RelatedTermIndexSettings(embedding_settings) diff --git a/tests/test_message_text_index_serialization.py b/tests/test_message_text_index_serialization.py index 002cd877..4504bf42 100644 --- a/tests/test_message_text_index_serialization.py +++ b/tests/test_message_text_index_serialization.py @@ -8,7 +8,7 @@ import numpy as np import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import IEmbeddingModel from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, TextEmbeddingIndexSettings, @@ -28,18 +28,14 @@ def sqlite_db(self) -> sqlite3.Connection: 
# Add test messages to the database cursor = db.cursor() - cursor.execute( - """ + cursor.execute(""" INSERT INTO Messages (msg_id, chunks, extra, tags, metadata) VALUES (1, '["First test message", "Second chunk"]', '{}', '[]', '{}') - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" INSERT INTO Messages (msg_id, chunks, extra, tags, metadata) VALUES (2, '["Another message"]', '{}', '[]', '{}') - """ - ) + """) db.commit() return db @@ -48,7 +44,7 @@ def sqlite_db(self) -> sqlite3.Connection: async def test_message_text_index_serialize_not_empty( self, sqlite_db: sqlite3.Connection, - embedding_model: AsyncEmbeddingModel, + embedding_model: IEmbeddingModel, needs_auth: None, ): """Test that MessageTextIndex serialization produces non-empty data when populated.""" @@ -115,7 +111,7 @@ async def test_message_text_index_serialize_not_empty( async def test_message_text_index_deserialize_restores_data( self, sqlite_db: sqlite3.Connection, - embedding_model: AsyncEmbeddingModel, + embedding_model: IEmbeddingModel, needs_auth: None, ): """Test that MessageTextIndex deserialization actually restores data.""" diff --git a/tests/test_messageindex.py b/tests/test_messageindex.py index b4e40dd6..7e00cc45 100644 --- a/tests/test_messageindex.py +++ b/tests/test_messageindex.py @@ -4,6 +4,7 @@ from typing import cast from unittest.mock import AsyncMock, MagicMock +import numpy as np import pytest from typeagent.knowpro.convsettings import MessageTextIndexSettings @@ -42,10 +43,10 @@ def message_text_index( mock_text_location_index: MagicMock, ) -> IMessageTextEmbeddingIndex: """Fixture to create a MessageTextIndex instance with a mocked TextToTextLocationIndex.""" - from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME + from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = 
create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(test_model) settings = MessageTextIndexSettings(embedding_settings) index = MessageTextIndex(settings) @@ -55,10 +56,10 @@ def message_text_index( def test_message_text_index_init(needs_auth: None): """Test initialization of MessageTextIndex.""" - from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME + from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(test_model) settings = MessageTextIndexSettings(embedding_settings) index = MessageTextIndex(settings) @@ -145,13 +146,11 @@ async def test_lookup_messages_in_subset( @pytest.mark.asyncio async def test_generate_embedding(needs_auth: None): """Test generating an embedding for a message without mocking.""" - import numpy as np - - from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME + from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings # Create real MessageTextIndex with test model - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(test_model) settings = MessageTextIndexSettings(embedding_settings) index = MessageTextIndex(settings) @@ -159,7 +158,7 @@ async def test_generate_embedding(needs_auth: None): embedding = await index.generate_embedding("test message") assert embedding is not None - assert len(embedding) == test_model.embedding_size # 3 for test model + assert len(embedding) == 3 # test model uses embedding size 3 dot = float(np.dot(embedding, embedding)) assert abs(dot - 1.0) < 1e-6, f"Embedding not normalized: {dot}" @@ -205,14 +204,14 @@ async def 
test_build_message_index(needs_auth: None): ] # Create storage provider asynchronously - from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME + from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, RelatedTermIndexSettings, ) - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(test_model) message_text_settings = MessageTextIndexSettings(embedding_settings) related_terms_settings = RelatedTermIndexSettings(embedding_settings) diff --git a/tests/test_messageutils.py b/tests/test_messageutils.py new file mode 100644 index 00000000..97c61c13 --- /dev/null +++ b/tests/test_messageutils.py @@ -0,0 +1,23 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +from typeagent.knowpro.interfaces import TextLocation, TextRange +from typeagent.knowpro.messageutils import ( + text_range_from_message_chunk, +) + + +class TestTextRangeFromMessageChunk: + def test_default_chunk_ordinal(self) -> None: + tr = text_range_from_message_chunk(message_ordinal=3) + assert tr.start == TextLocation(3, 0) + assert tr.end is None + + def test_explicit_chunk_ordinal(self) -> None: + tr = text_range_from_message_chunk(message_ordinal=5, chunk_ordinal=2) + assert tr.start == TextLocation(5, 2) + assert tr.end is None + + def test_returns_text_range(self) -> None: + tr = text_range_from_message_chunk(0) + assert isinstance(tr, TextRange) diff --git a/tests/test_model_adapters.py b/tests/test_model_adapters.py new file mode 100644 index 00000000..eaf36f88 --- /dev/null +++ b/tests/test_model_adapters.py @@ -0,0 +1,270 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +from unittest.mock import AsyncMock + +import numpy as np +import pytest + +from pydantic_ai import Embedder +from pydantic_ai.embeddings import EmbeddingResult +from pydantic_ai.messages import ( + ModelResponse, + SystemPromptPart, + TextPart, + UserPromptPart, +) +from pydantic_ai.models import Model +import typechat + +from typeagent.aitools import model_adapters +from typeagent.aitools.embeddings import CachingEmbeddingModel, NormalizedEmbedding +from typeagent.aitools.model_adapters import ( + configure_models, + create_chat_model, + create_embedding_model, + PydanticAIChatModel, + PydanticAIEmbedder, +) + +# --------------------------------------------------------------------------- +# Spec format +# --------------------------------------------------------------------------- + + +def test_spec_uses_colon_separator() -> None: + """Specs use ``provider:model`` format matching pydantic_ai conventions.""" + with pytest.raises(Exception): + # A nonsense provider should fail + create_chat_model("nonexistent_provider_xyz:fake-model") + + +# --------------------------------------------------------------------------- +# PydanticAIChatModel adapter +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_chat_adapter_complete() -> None: + """PydanticAIChatModel wraps a pydantic_ai Model.""" + mock_model = AsyncMock(spec=Model) + mock_model.request.return_value = ModelResponse(parts=[TextPart(content="hello")]) + + adapter = PydanticAIChatModel(mock_model) + result = await adapter.complete("test prompt") + assert isinstance(result, typechat.Success) + assert result.value == "hello" + + +@pytest.mark.asyncio +async def test_chat_adapter_prompt_sections() -> None: + """PydanticAIChatModel handles list[PromptSection] prompts.""" + mock_model = AsyncMock(spec=Model) + mock_model.request.return_value = ModelResponse( + parts=[TextPart(content="response")] + ) + + adapter = PydanticAIChatModel(mock_model) + 
sections: list[typechat.PromptSection] = [ + {"role": "system", "content": "You are helpful."}, + {"role": "user", "content": "Hello"}, + ] + result = await adapter.complete(sections) + assert isinstance(result, typechat.Success) + assert result.value == "response" + + # Verify the request was called with proper message structure + call_args = mock_model.request.call_args + messages = call_args[0][0] + assert len(messages) == 1 + request = messages[0] + assert isinstance(request.parts[0], SystemPromptPart) + assert isinstance(request.parts[1], UserPromptPart) + + +# --------------------------------------------------------------------------- +# PydanticAIEmbedder adapter +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_embedding_adapter_single() -> None: + """PydanticAIEmbedder computes a single normalized embedding.""" + mock_embedder = AsyncMock(spec=Embedder) + raw_vec = [3.0, 4.0, 0.0] + mock_embedder.embed_documents.return_value = EmbeddingResult( + embeddings=[raw_vec], + inputs=["test"], + input_type="document", + model_name="test-model", + provider_name="test", + ) + + adapter = PydanticAIEmbedder(mock_embedder, "test-model") + result = await adapter.get_embedding_nocache("test") + assert result.shape == (3,) + norm = float(np.linalg.norm(result)) + assert abs(norm - 1.0) < 1e-6 + + +@pytest.mark.asyncio +async def test_embedding_adapter_empty_batch_raises() -> None: + """Empty batch raises ValueError.""" + mock_embedder = AsyncMock(spec=Embedder) + adapter = PydanticAIEmbedder(mock_embedder, "test-model") + with pytest.raises(ValueError, match="Cannot embed an empty list"): + await adapter.get_embeddings_nocache([]) + + +@pytest.mark.asyncio +async def test_embedding_adapter_batch() -> None: + """PydanticAIEmbedder computes batch embeddings.""" + mock_embedder = AsyncMock(spec=Embedder) + mock_embedder.embed_documents.return_value = EmbeddingResult( + embeddings=[[1.0, 0.0], [0.0, 
1.0]], + inputs=["a", "b"], + input_type="document", + model_name="test-model", + provider_name="test", + ) + + adapter = PydanticAIEmbedder(mock_embedder, "test-model") + result = await adapter.get_embeddings_nocache(["a", "b"]) + assert result.shape == (2, 2) + + +@pytest.mark.asyncio +async def test_embedding_adapter_caching() -> None: + """CachingEmbeddingModel avoids re-computing embeddings.""" + mock_embedder = AsyncMock(spec=Embedder) + mock_embedder.embed_documents.return_value = EmbeddingResult( + embeddings=[[1.0, 0.0, 0.0]], + inputs=["cached"], + input_type="document", + model_name="test-model", + provider_name="test", + ) + + embedder = PydanticAIEmbedder(mock_embedder, "test-model") + adapter = CachingEmbeddingModel(embedder) + first = await adapter.get_embedding("cached") + second = await adapter.get_embedding("cached") + np.testing.assert_array_equal(first, second) + # embed_documents() should only be called once + assert mock_embedder.embed_documents.call_count == 1 + + +@pytest.mark.asyncio +async def test_embedding_adapter_add_embedding() -> None: + """add_embedding() populates the cache.""" + mock_embedder = AsyncMock(spec=Embedder) + embedder = PydanticAIEmbedder(mock_embedder, "test-model") + adapter = CachingEmbeddingModel(embedder) + vec: NormalizedEmbedding = np.array([1.0, 0.0, 0.0], dtype=np.float32) + adapter.add_embedding("key", vec) + result = await adapter.get_embedding("key") + np.testing.assert_array_equal(result, vec) + # No embed_documents() call needed + mock_embedder.embed_documents.assert_not_called() + + +@pytest.mark.asyncio +async def test_embedding_adapter_empty_batch_returns_empty() -> None: + """Empty batch via CachingEmbeddingModel raises ValueError.""" + mock_embedder = AsyncMock(spec=Embedder) + embedder = PydanticAIEmbedder(mock_embedder, "test-model") + adapter = CachingEmbeddingModel(embedder) + with pytest.raises(ValueError, match="Cannot embed an empty list"): + await adapter.get_embeddings([]) + + +# 
--------------------------------------------------------------------------- +# configure_models +# --------------------------------------------------------------------------- + + +def test_configure_models_returns_correct_types( + monkeypatch: pytest.MonkeyPatch, +) -> None: + """configure_models creates both adapters.""" + monkeypatch.setenv("OPENAI_API_KEY", "test-key") + chat, embedder = configure_models("openai:gpt-4o", "openai:text-embedding-3-small") + assert isinstance(chat, PydanticAIChatModel) + assert isinstance(embedder, CachingEmbeddingModel) + + +def test_create_embedding_model_uses_azure_deployment_name( + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Azure embedding endpoints contribute the deployment name.""" + captured: dict[str, object] = {} + provider = object() + + class FakeOpenAIEmbeddingModel: + def __init__(self, model_name: str, provider: object) -> None: + captured["azure_model_name"] = model_name + captured["provider"] = provider + + monkeypatch.delenv("OPENAI_API_KEY", raising=False) + monkeypatch.delenv("OPENAI_EMBEDDING_MODEL", raising=False) + monkeypatch.setenv("AZURE_OPENAI_API_KEY", "test-key") + monkeypatch.setenv( + "AZURE_OPENAI_ENDPOINT_EMBEDDING", + "https://myhost.openai.azure.com/openai/deployments/ada-002/embeddings?api-version=2025-01-01-preview", + ) + monkeypatch.setattr( + model_adapters, + "_make_azure_provider", + lambda endpoint_envvar, api_key_envvar: provider, + ) + monkeypatch.setattr( + "pydantic_ai.embeddings.openai.OpenAIEmbeddingModel", FakeOpenAIEmbeddingModel + ) + monkeypatch.setattr( + model_adapters, "_PydanticAIEmbedder", lambda embedding_model: embedding_model + ) + + embedder = create_embedding_model() + + assert isinstance(embedder, CachingEmbeddingModel) + assert captured["azure_model_name"] == "ada-002" + assert captured["provider"] is provider + assert embedder.model_name == "text-embedding-ada-002" + + +def test_create_chat_model_uses_azure_deployment_name( + monkeypatch: pytest.MonkeyPatch, 
+) -> None: + """Azure chat endpoints contribute the deployment name.""" + captured: dict[str, object] = {} + provider = object() + + class FakeOpenAIChatModel: + def __init__(self, model_name: str, provider: object) -> None: + captured["azure_model_name"] = model_name + captured["provider"] = provider + + async def request(self, *args: object, **kwargs: object) -> ModelResponse: + raise AssertionError("request() should not be called in this test") + + monkeypatch.delenv("OPENAI_API_KEY", raising=False) + monkeypatch.delenv("OPENAI_MODEL", raising=False) + monkeypatch.setenv("AZURE_OPENAI_API_KEY", "test-key") + monkeypatch.setenv( + "AZURE_OPENAI_ENDPOINT", + "https://myhost.openai.azure.com/openai/deployments/gpt-4o-2/chat/completions?api-version=2025-01-01-preview", + ) + monkeypatch.setattr( + model_adapters, + "_make_azure_provider", + lambda endpoint_envvar="AZURE_OPENAI_ENDPOINT", api_key_envvar="AZURE_OPENAI_API_KEY": provider, + ) + monkeypatch.setattr( + "pydantic_ai.models.openai.OpenAIChatModel", FakeOpenAIChatModel + ) + + chat_model = create_chat_model() + + assert isinstance(chat_model, PydanticAIChatModel) + assert captured["azure_model_name"] == "gpt-4o-2" + assert captured["provider"] is provider diff --git a/tests/test_online.py b/tests/test_online.py index c354c36e..1db50e5e 100644 --- a/tests/test_online.py +++ b/tests/test_online.py @@ -3,36 +3,24 @@ import pytest -from typeagent.aitools.utils import create_async_openai_client +import typechat + +from typeagent.aitools.model_adapters import create_chat_model @pytest.mark.asyncio async def test_why_is_sky_blue(really_needs_auth: None): - """Test that chat agent responds correctly to 'why is the sky blue?'""" - - # Create an async OpenAI client - try: - client = create_async_openai_client() - except RuntimeError as e: - if "Neither OPENAI_API_KEY nor AZURE_OPENAI_API_KEY was provided." 
in str(e): - pytest.skip("API keys not configured") - raise - - # Send the user request - response = await client.chat.completions.create( - model="gpt-4o", - messages=[ - { - "role": "user", - "content": "why is the sky blue?", - } - ], - temperature=0, - ) - - # Get the response message - msg = response.choices[0].message.content - assert msg is not None, "Chat agent didn't respond" + """Test that chat agent responds correctly to 'why is the sky blue?' + + Uses create_chat_model (the pydantic-ai code path) so this test exercises + the same Azure provider wiring as the rest of the codebase. + """ + model = create_chat_model() + + result = await model.complete("why is the sky blue?") + assert isinstance(result, typechat.Success), f"Chat completion failed: {result}" + msg = result.value + assert msg, "Chat agent didn't respond" print(f"Chat agent response: {msg}") diff --git a/tests/test_podcast_incremental.py b/tests/test_podcast_incremental.py index 92d5ad32..4b1732d6 100644 --- a/tests/test_podcast_incremental.py +++ b/tests/test_podcast_incremental.py @@ -8,7 +8,7 @@ import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.podcasts.podcast import Podcast, PodcastMessage, PodcastMessageMeta from typeagent.storage.sqlite.provider import SqliteStorageProvider @@ -20,7 +20,7 @@ async def test_podcast_add_messages_with_indexing(): with tempfile.TemporaryDirectory() as tmpdir: db_path = os.path.join(tmpdir, "test.db") - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) settings.semantic_ref_index_settings.auto_extract_knowledge = False @@ -57,7 +57,7 @@ async def test_podcast_add_messages_batched(): with tempfile.TemporaryDirectory() as tmpdir: db_path = os.path.join(tmpdir, 
"test.db") - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) settings.semantic_ref_index_settings.auto_extract_knowledge = False diff --git a/tests/test_podcasts.py b/tests/test_podcasts.py index 6f901a75..d77f6ba3 100644 --- a/tests/test_podcasts.py +++ b/tests/test_podcasts.py @@ -1,12 +1,16 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. +import asyncio from datetime import timezone import os import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typechat import Result, Success + +from typeagent.aitools.embeddings import IEmbeddingModel +from typeagent.knowpro import knowledge_schema as kplib from typeagent.knowpro.convsettings import ConversationSettings from typeagent.knowpro.interfaces import Datetime from typeagent.knowpro.serialization import DATA_FILE_SUFFIX, EMBEDDING_FILE_SUFFIX @@ -16,9 +20,34 @@ from conftest import FAKE_PODCAST_TXT +class TrackingKnowledgeExtractor: + def __init__(self, delay: float = 0.01) -> None: + self.delay = delay + self.current_concurrency = 0 + self.max_concurrency = 0 + self.started_texts: list[str] = [] + + async def extract(self, message: str) -> Result[kplib.KnowledgeResponse]: + self.started_texts.append(message) + self.current_concurrency += 1 + self.max_concurrency = max(self.max_concurrency, self.current_concurrency) + try: + await asyncio.sleep(self.delay) + return Success( + kplib.KnowledgeResponse( + entities=[], + actions=[], + inverse_actions=[], + topics=[message], + ) + ) + finally: + self.current_concurrency -= 1 + + @pytest.mark.asyncio async def test_ingest_podcast( - really_needs_auth: None, temp_dir: str, embedding_model: AsyncEmbeddingModel + really_needs_auth: None, temp_dir: str, embedding_model: IEmbeddingModel ): # Import the podcast settings = ConversationSettings(embedding_model) @@ -86,3 +115,30 @@ async def test_ingest_podcast( 
open(filename2 + EMBEDDING_FILE_SUFFIX, "rb") as f2, ): assert f1.read() == f2.read(), "Embedding (binary) files do not match" + + +@pytest.mark.asyncio +async def test_ingest_podcast_parallelism_uses_concurrency( + temp_dir: str, embedding_model: IEmbeddingModel +) -> None: + transcript_path = os.path.join(temp_dir, "parallel_podcast.txt") + with open(transcript_path, "w") as f: + for i in range(25): + f.write(f"SPEAKER{i}: Message {i}\n") + + settings = ConversationSettings(embedding_model) + extractor = TrackingKnowledgeExtractor() + settings.semantic_ref_index_settings.knowledge_extractor = extractor + + concurrency = 5 + podcast = await podcast_ingest.ingest_podcast( + transcript_path, + settings, + start_date=Datetime.now(timezone.utc), + length_minutes=5.0, + concurrency=concurrency, + ) + + assert await podcast.messages.size() == 25 + assert extractor.max_concurrency == concurrency + assert len(extractor.started_texts) == 25 diff --git a/tests/test_property_index_population.py b/tests/test_property_index_population.py index a563ed24..5a158353 100644 --- a/tests/test_property_index_population.py +++ b/tests/test_property_index_population.py @@ -7,13 +7,12 @@ import os import tempfile -import numpy as np +from dotenv import load_dotenv import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel -from typeagent.aitools.utils import load_dotenv +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings -from typeagent.knowpro import kplib +from typeagent.knowpro import knowledge_schema as kplib from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, RelatedTermIndexSettings, @@ -23,16 +22,6 @@ from typeagent.storage import SqliteStorageProvider -class MockEmbeddingModel(AsyncEmbeddingModel): - def __init__(self): - super().__init__(embedding_size=3, model_name="test") - - async def get_embeddings(self, keys: list[str]) -> np.ndarray: - result = 
np.random.rand(len(keys), 3).astype(np.float32) - norms = np.linalg.norm(result, axis=1, keepdims=True) - return result / norms - - @pytest.mark.asyncio async def test_property_index_population_from_database(really_needs_auth): """Test that property index is correctly populated when reopening a database.""" @@ -42,7 +31,7 @@ async def test_property_index_population_from_database(really_needs_auth): temp_db_file.close() try: - embedding_model = MockEmbeddingModel() + embedding_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(embedding_model) message_text_settings = MessageTextIndexSettings(embedding_settings) related_terms_settings = RelatedTermIndexSettings(embedding_settings) @@ -98,7 +87,7 @@ async def test_property_index_population_from_database(really_needs_auth): # Reopen database and verify property index # Use the same embedding settings to avoid dimension mismatch - embedding_model2 = MockEmbeddingModel() + embedding_model2 = create_test_embedding_model() embedding_settings2 = TextEmbeddingIndexSettings(embedding_model2) message_text_settings2 = MessageTextIndexSettings(embedding_settings2) related_terms_settings2 = RelatedTermIndexSettings(embedding_settings2) diff --git a/tests/test_propindex.py b/tests/test_propindex.py index 5ffd9784..8f0a99d6 100644 --- a/tests/test_propindex.py +++ b/tests/test_propindex.py @@ -5,7 +5,7 @@ from typeagent.knowpro.collections import TextRangeCollection, TextRangesInScope from typeagent.knowpro.interfaces import SemanticRef, Tag, TextLocation, TextRange -from typeagent.knowpro.kplib import Action, ConcreteEntity, Facet +from typeagent.knowpro.knowledge_schema import Action, ConcreteEntity, Facet from typeagent.storage.memory import MemorySemanticRefCollection from typeagent.storage.memory.propindex import ( add_action_properties_to_index, diff --git a/tests/test_query.py b/tests/test_query.py index 5a58df3d..c6431663 100644 --- a/tests/test_query.py +++ b/tests/test_query.py @@ -646,7 
+646,7 @@ async def test_lookup_knowledge_type(): TextRange, Topic, ) - from typeagent.knowpro.kplib import ConcreteEntity + from typeagent.knowpro.knowledge_schema import ConcreteEntity from typeagent.knowpro.query import lookup_knowledge_type # Create valid TextRange and knowledge objects @@ -680,3 +680,122 @@ async def test_lookup_knowledge_type(): assert {r.semantic_ref_ordinal for r in result} == {0, 2} # Should return empty list if no match assert await lookup_knowledge_type(collection, "action") == [] + + +class TestGetTextRangeForDateRange: + """Tests for the ordinal counter fix and timestamp None guard.""" + + @pytest.mark.asyncio + async def test_messages_without_ordinal_attribute(self) -> None: + """Messages that lack .ordinal should still work (manual counter).""" + + class BareMessage(FakeMessage): + """A message subclass that explicitly lacks .ordinal.""" + + def __init__(self, ts: str) -> None: + super().__init__("text") + self.timestamp = ts + if hasattr(self, "ordinal"): + del self.ordinal + + conv = FakeConversation( + messages=[ + BareMessage("2020-01-01T01:00:00"), + BareMessage("2020-01-01T02:00:00"), + ], + ) + date_range = DateRange( + start=Datetime(2020, 1, 1, 0, 0, 0), + end=Datetime(2020, 1, 2, 0, 0, 0), + ) + result = await get_text_range_for_date_range(conv, date_range) + assert result is not None + assert result.start.message_ordinal == 0 + assert result.end is not None + assert result.end.message_ordinal == 2 # exclusive end + + @pytest.mark.asyncio + async def test_none_timestamp_skipped(self) -> None: + """Messages with None timestamp should be skipped, not crash.""" + conv = FakeConversation( + messages=[ + FakeMessage("no timestamp"), # timestamp=None + FakeMessage("has timestamp", message_ordinal=1), + ], + ) + date_range = DateRange( + start=Datetime(2020, 1, 1, 0, 0, 0), + end=Datetime(2020, 1, 2, 0, 0, 0), + ) + result = await get_text_range_for_date_range(conv, date_range) + # Only message at ordinal 1 matches: + assert 
result is not None + assert result.start.message_ordinal == 1 + assert result.end is not None + assert result.end.message_ordinal == 2 + + @pytest.mark.asyncio + async def test_all_none_timestamps_returns_none(self) -> None: + """If every message has None timestamp, result should be None.""" + conv = FakeConversation( + messages=[FakeMessage("a"), FakeMessage("b")], + ) + date_range = DateRange( + start=Datetime(2020, 1, 1, 0, 0, 0), + end=Datetime(2020, 1, 2, 0, 0, 0), + ) + assert await get_text_range_for_date_range(conv, date_range) is None + + @pytest.mark.asyncio + async def test_single_message_in_range(self) -> None: + conv = FakeConversation( + messages=[FakeMessage("msg", message_ordinal=0)], + ) + date_range = DateRange( + start=Datetime(2020, 1, 1, 0, 0, 0), + end=Datetime(2020, 1, 2, 0, 0, 0), + ) + result = await get_text_range_for_date_range(conv, date_range) + assert result is not None + assert result.start.message_ordinal == 0 + assert result.end is not None + assert result.end.message_ordinal == 1 + + +class TestWhereSemanticRefExprProvenance: + """Verify that WhereSemanticRefExpr copies (not shares) search_term_matches.""" + + @pytest.mark.asyncio + async def test_filtered_accumulator_has_copied_provenance( + self, searchable_conversation: FakeConversation + ) -> None: + """The filtered accumulator's search_term_matches is a copy.""" + from typeagent.knowpro.query import WhereSemanticRefExpr + + # Build a source accumulator with some provenance + src = SemanticRefAccumulator() + src.search_term_matches.add("term_a") + src.add_term_matches( + Term("test"), + [ScoredSemanticRefOrdinal(0, 1.0)], + is_exact_match=True, + weight=1.0, + ) + + # Create a trivial source expression that returns the above accumulator + class ConstExpr(QueryOpExpr[SemanticRefAccumulator]): + async def eval(self, context: QueryEvalContext) -> SemanticRefAccumulator: + return src + + # WhereSemanticRefExpr with no predicates (all matches pass) + expr = WhereSemanticRefExpr( + 
source_expr=ConstExpr(), + predicates=[], + ) + ctx = QueryEvalContext(searchable_conversation) + filtered = await expr.eval(ctx) + + # Provenance was copied, not shared: + assert "term_a" in filtered.search_term_matches + filtered.search_term_matches.add("new_term") + assert "new_term" not in src.search_term_matches diff --git a/tests/test_query_method.py b/tests/test_query_method.py index ac605823..bcbf2e00 100644 --- a/tests/test_query_method.py +++ b/tests/test_query_method.py @@ -6,7 +6,7 @@ import pytest from typeagent import create_conversation -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.transcripts.transcript import TranscriptMessage, TranscriptMessageMeta @@ -15,7 +15,7 @@ async def test_query_method_basic(really_needs_auth: None): """Test the basic query method workflow.""" # Create a conversation with some test data - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) conversation = await create_conversation( None, @@ -60,7 +60,7 @@ async def test_query_method_basic(really_needs_auth: None): @pytest.mark.asyncio async def test_query_method_empty_conversation(really_needs_auth: None): """Test query method on an empty conversation.""" - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) conversation = await create_conversation( None, diff --git a/tests/test_related_terms_fast.py b/tests/test_related_terms_fast.py index 3919666f..43ffa7cb 100644 --- a/tests/test_related_terms_fast.py +++ b/tests/test_related_terms_fast.py @@ -9,10 +9,10 @@ import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from 
typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.knowpro.interfaces import SemanticRef, TextLocation, TextRange -from typeagent.knowpro.kplib import ConcreteEntity +from typeagent.knowpro.knowledge_schema import ConcreteEntity from typeagent.podcasts.podcast import Podcast, PodcastMessage, PodcastMessageMeta from typeagent.storage import SqliteStorageProvider @@ -26,7 +26,7 @@ async def test_related_terms_index_minimal(): try: # Create minimal test data with test embedding model (no API keys needed) - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() settings = ConversationSettings(model=test_model) # Use a simple storage provider without AI embeddings diff --git a/tests/test_related_terms_index_population.py b/tests/test_related_terms_index_population.py index 5590ba19..bf40722e 100644 --- a/tests/test_related_terms_index_population.py +++ b/tests/test_related_terms_index_population.py @@ -7,12 +7,12 @@ import os import tempfile +from dotenv import load_dotenv import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME -from typeagent.aitools.utils import load_dotenv +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings -from typeagent.knowpro import kplib +from typeagent.knowpro import knowledge_schema as kplib from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, RelatedTermIndexSettings, @@ -32,7 +32,7 @@ async def test_related_terms_index_population_from_database(really_needs_auth): try: # Use the test model that's already configured in the system - embedding_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + embedding_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(embedding_model) message_text_settings = 
MessageTextIndexSettings(embedding_settings) related_terms_settings = RelatedTermIndexSettings(embedding_settings) diff --git a/tests/test_reltermsindex.py b/tests/test_reltermsindex.py index 57afa913..20752084 100644 --- a/tests/test_reltermsindex.py +++ b/tests/test_reltermsindex.py @@ -8,14 +8,14 @@ import pytest_asyncio # TypeAgent imports -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import IEmbeddingModel from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, RelatedTermIndexSettings, ) from typeagent.knowpro.interfaces import IMessage, ITermToRelatedTermsIndex, Term -from typeagent.knowpro.kplib import KnowledgeResponse +from typeagent.knowpro.knowledge_schema import KnowledgeResponse from typeagent.knowpro.query import CompiledSearchTerm, CompiledTermGroup from typeagent.storage import SqliteStorageProvider from typeagent.storage.memory import MemoryStorageProvider @@ -30,7 +30,7 @@ @pytest_asyncio.fixture(params=["memory", "sqlite"]) async def related_terms_index( request: pytest.FixtureRequest, - embedding_model: AsyncEmbeddingModel, + embedding_model: IEmbeddingModel, temp_db_path: str, ) -> AsyncGenerator[ITermToRelatedTermsIndex, None]: class DummyTestMessage(IMessage): diff --git a/tests/test_search.py b/tests/test_search.py new file mode 100644 index 00000000..7028403d --- /dev/null +++ b/tests/test_search.py @@ -0,0 +1,113 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +"""Tests for knowpro/search.py — SearchOptions, ConversationSearchResult.""" + +import pytest + +from typeagent.knowpro.interfaces import ( + SearchTerm, + SearchTermGroup, + Term, +) +from typeagent.knowpro.interfaces_core import ScoredMessageOrdinal +from typeagent.knowpro.query import is_conversation_searchable +from typeagent.knowpro.search import ( + ConversationSearchResult, + search_conversation_knowledge, + SearchOptions, +) + +from conftest import FakeConversation, FakeMessage, FakeTermIndex + +# --------------------------------------------------------------------------- +# SearchOptions +# --------------------------------------------------------------------------- + + +def test_search_options_defaults() -> None: + opts = SearchOptions() + assert opts.max_knowledge_matches is None + assert opts.exact_match is False + assert opts.max_message_matches is None + assert opts.max_chars_in_budget is None + assert opts.threshold_score is None + + +def test_search_options_repr_empty() -> None: + opts = SearchOptions() + # Only non-None values appear in repr; exact_match=False is still included. 
+ r = repr(opts) + assert r.startswith("SearchOptions(") + + +def test_search_options_repr_with_fields() -> None: + opts = SearchOptions(max_knowledge_matches=5, exact_match=True) + r = repr(opts) + assert "max_knowledge_matches=5" in r + assert "exact_match=True" in r + + +# --------------------------------------------------------------------------- +# ConversationSearchResult +# --------------------------------------------------------------------------- + + +def test_conversation_search_result_basic() -> None: + result = ConversationSearchResult( + message_matches=[ScoredMessageOrdinal(0, 0.9)], + knowledge_matches={}, + raw_query_text="test", + ) + assert len(result.message_matches) == 1 + assert result.raw_query_text == "test" + + +def test_conversation_search_result_defaults() -> None: + result = ConversationSearchResult(message_matches=[], knowledge_matches={}) + assert result.raw_query_text is None + + +# --------------------------------------------------------------------------- +# is_conversation_searchable (from query.py, used heavily in search.py) +# --------------------------------------------------------------------------- + + +def test_is_conversation_searchable_true() -> None: + conv = FakeConversation( + messages=[FakeMessage("hello", 0)], + has_secondary_indexes=False, + ) + conv.semantic_ref_index = FakeTermIndex() + assert is_conversation_searchable(conv) is True + + +def test_is_conversation_searchable_no_index() -> None: + conv = FakeConversation(has_secondary_indexes=False) + conv.semantic_ref_index = None + assert is_conversation_searchable(conv) is False + + +def test_is_conversation_searchable_no_semrefs() -> None: + conv = FakeConversation(has_secondary_indexes=False) + conv.semantic_refs = None # type: ignore[assignment] + assert is_conversation_searchable(conv) is False + + +# --------------------------------------------------------------------------- +# search_conversation_knowledge returns None when not searchable +# 
--------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_search_conversation_knowledge_non_searchable_returns_none() -> None: + """When the conversation has no semantic ref index, result should be None.""" + conv = FakeConversation(has_secondary_indexes=False) + conv.semantic_ref_index = None + + group = SearchTermGroup( + boolean_op="or", + terms=[SearchTerm(term=Term("hello"))], + ) + result = await search_conversation_knowledge(conv, group) + assert result is None diff --git a/tests/test_searchlang.py b/tests/test_searchlang.py new file mode 100644 index 00000000..15c80aaf --- /dev/null +++ b/tests/test_searchlang.py @@ -0,0 +1,110 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +from typeagent.knowpro.search import SearchOptions +from typeagent.knowpro.searchlang import ( + LanguageQueryCompileOptions, + LanguageSearchOptions, +) + + +class TestSearchOptionsRepr: + """Tests for the custom __repr__ on SearchOptions and LanguageSearchOptions.""" + + def test_all_defaults_shows_non_none_fields(self) -> None: + """Default fields that are not None (like exact_match=False) appear.""" + opts = SearchOptions() + r = repr(opts) + assert r.startswith("SearchOptions(") + # exact_match defaults to False, which is not None, so it shows up: + assert "exact_match=False" in r + # None-valued fields are omitted: + assert "max_knowledge_matches" not in r + + def test_non_none_fields_shown(self) -> None: + opts = SearchOptions(max_knowledge_matches=10, threshold_score=0.5) + r = repr(opts) + assert "max_knowledge_matches=10" in r + assert "threshold_score=0.5" in r + # Fields left at None are omitted: + assert "max_message_matches" not in r + assert "max_chars_in_budget" not in r + + def test_false_field_shown(self) -> None: + """False is not None, so it should appear.""" + opts = SearchOptions(exact_match=False) + assert "exact_match=False" in repr(opts) + + def 
test_true_field_shown(self) -> None: + opts = SearchOptions(exact_match=True) + assert "exact_match=True" in repr(opts) + + def test_all_fields_set(self) -> None: + """When every field is non-None, all appear in repr.""" + opts = SearchOptions( + max_knowledge_matches=10, + exact_match=True, + max_message_matches=20, + max_chars_in_budget=5000, + threshold_score=0.75, + ) + r = repr(opts) + assert "max_knowledge_matches=10" in r + assert "exact_match=True" in r + assert "max_message_matches=20" in r + assert "max_chars_in_budget=5000" in r + assert "threshold_score=0.75" in r + + def test_zero_values_shown(self) -> None: + """Zero is not None, so numeric zeros should appear.""" + opts = SearchOptions(max_knowledge_matches=0, threshold_score=0.0) + r = repr(opts) + assert "max_knowledge_matches=0" in r + assert "threshold_score=0.0" in r + + def test_no_dunder_or_method_names(self) -> None: + """The repr must not contain dunder names or method objects.""" + opts = SearchOptions(max_knowledge_matches=5) + r = repr(opts) + assert "__init__" not in r + assert "__eq__" not in r + assert "bound method" not in r + + +class TestLanguageSearchOptionsRepr: + """Tests for LanguageSearchOptions.__repr__ (subclass of SearchOptions).""" + + def test_all_defaults_shows_class_name(self) -> None: + opts = LanguageSearchOptions() + r = repr(opts) + # Subclass name, not parent name: + assert r.startswith("LanguageSearchOptions(") + + def test_inherited_and_own_fields(self) -> None: + opts = LanguageSearchOptions( + max_knowledge_matches=5, + compile_options=LanguageQueryCompileOptions(exact_scope=True), + ) + r = repr(opts) + assert "LanguageSearchOptions(" in r + assert "max_knowledge_matches=5" in r + assert "compile_options=" in r + assert "exact_scope=True" in r + + def test_none_fields_omitted(self) -> None: + opts = LanguageSearchOptions() + r = repr(opts) + assert "compile_options" not in r + assert "model_instructions" not in r + assert "max_knowledge_matches" not in r + + 
def test_no_private_fields(self) -> None: + """Fields starting with _ should never appear in repr.""" + opts = LanguageSearchOptions(max_knowledge_matches=3) + r = repr(opts) + # No key=value pair where the key starts with underscore: + inside = r.split("(", 1)[1].rstrip(")") + for part in inside.split(", "): + if "=" in part: + key = part.split("=", 1)[0] + assert not key.startswith("_"), f"private field {key!r} in repr" diff --git a/tests/test_searchlang_compile.py b/tests/test_searchlang_compile.py new file mode 100644 index 00000000..9b208fbb --- /dev/null +++ b/tests/test_searchlang_compile.py @@ -0,0 +1,638 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Unit tests for searchlang.py — compile_search_query, SearchQueryCompiler, +and related helper functions that don't require a live LLM.""" + +import datetime +from typing import Literal + +from typeagent.knowpro.date_time_schema import DateTime, DateTimeRange, DateVal, TimeVal +from typeagent.knowpro.interfaces import SearchTerm, SearchTermGroup +from typeagent.knowpro.search_query_schema import ( + ActionTerm, + EntityTerm, + FacetTerm, + SearchExpr, + SearchFilter, + SearchQuery, + VerbsTerm, +) +from typeagent.knowpro.searchlang import ( + _compile_fallback_query, + compile_search_filter, + compile_search_query, + date_range_from_datetime_range, + datetime_from_date_time, + is_entity_term_list, + LanguageQueryCompileOptions, + LanguageSearchFilter, + optimize_or_max, + SearchQueryCompiler, +) + +from conftest import FakeConversation + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def make_entity( + name: str, + types: list[str] | None = None, + facets: list[FacetTerm] | None = None, + is_pronoun: bool = False, +) -> EntityTerm: + return EntityTerm(name=name, is_name_pronoun=is_pronoun, type=types, facets=facets) + + +def make_action( + actor: 
list[EntityTerm] | Literal["*"] = "*", + verbs: list[str] | None = None, + targets: list[EntityTerm] | None = None, + additional: list[EntityTerm] | None = None, + is_informational: bool = False, +) -> ActionTerm: + return ActionTerm( + actor_entities=actor, + is_informational=is_informational, + action_verbs=VerbsTerm(words=verbs) if verbs else None, + target_entities=targets, + additional_entities=additional, + ) + + +def make_filter( + entities: list[EntityTerm] | None = None, + action: ActionTerm | None = None, + search_terms: list[str] | None = None, + time_range: DateTimeRange | None = None, +) -> SearchFilter: + return SearchFilter( + entity_search_terms=entities, + action_search_term=action, + search_terms=search_terms, + time_range=time_range, + ) + + +def make_query(filters: list[SearchFilter]) -> SearchQuery: + expr = SearchExpr( + rewritten_query="test query", + filters=filters, + ) + return SearchQuery(search_expressions=[expr]) + + +def make_compiler( + options: LanguageQueryCompileOptions | None = None, + lang_filter: LanguageSearchFilter | None = None, +) -> SearchQueryCompiler: + conv = FakeConversation() + return SearchQueryCompiler(conv, options, lang_filter) + + +# --------------------------------------------------------------------------- +# is_entity_term_list +# --------------------------------------------------------------------------- + + +class TestIsEntityTermList: + def test_list_returns_true(self) -> None: + terms = [make_entity("Alice")] + assert is_entity_term_list(terms) is True + + def test_empty_list_returns_true(self) -> None: + assert is_entity_term_list([]) is True + + def test_star_returns_false(self) -> None: + assert is_entity_term_list("*") is False + + def test_none_returns_false(self) -> None: + assert is_entity_term_list(None) is False + + +# --------------------------------------------------------------------------- +# optimize_or_max +# --------------------------------------------------------------------------- + + 
+class TestOptimizeOrMax: + def test_single_term_unwrapped(self) -> None: + inner = SearchTermGroup(boolean_op="and", terms=[]) + group = SearchTermGroup(boolean_op="or_max", terms=[inner]) + result = optimize_or_max(group) + assert result is inner + + def test_multiple_terms_kept_as_group(self) -> None: + inner1 = SearchTermGroup(boolean_op="and", terms=[]) + inner2 = SearchTermGroup(boolean_op="and", terms=[]) + group = SearchTermGroup(boolean_op="or_max", terms=[inner1, inner2]) + result = optimize_or_max(group) + assert result is group + + +# --------------------------------------------------------------------------- +# date_range_from_datetime_range / datetime_from_date_time +# --------------------------------------------------------------------------- + + +class TestDatetimeFromDateTime: + def test_date_only_zeros_time(self) -> None: + dt = datetime_from_date_time(DateTime(date=DateVal(day=15, month=6, year=2024))) + assert dt.year == 2024 + assert dt.month == 6 + assert dt.day == 15 + assert dt.hour == 0 + assert dt.minute == 0 + assert dt.second == 0 + assert dt.tzinfo == datetime.timezone.utc + + def test_with_time(self) -> None: + dt = datetime_from_date_time( + DateTime( + date=DateVal(day=1, month=1, year=2020), + time=TimeVal(hour=14, minute=30, seconds=45), + ) + ) + assert dt.hour == 14 + assert dt.minute == 30 + assert dt.second == 45 + + +class TestDateRangeFromDatetimeRange: + def test_start_only(self) -> None: + dtr = DateTimeRange( + start_date=DateTime(date=DateVal(day=1, month=1, year=2023)) + ) + dr = date_range_from_datetime_range(dtr) + assert dr.start.year == 2023 + assert dr.end is None + + def test_start_and_stop(self) -> None: + dtr = DateTimeRange( + start_date=DateTime(date=DateVal(day=1, month=1, year=2023)), + stop_date=DateTime(date=DateVal(day=31, month=12, year=2023)), + ) + dr = date_range_from_datetime_range(dtr) + assert dr.start.year == 2023 + assert dr.end is not None + assert dr.end.year == 2023 + assert dr.end.month == 12 
+ assert dr.end.day == 31 + + +# --------------------------------------------------------------------------- +# compile_search_query (standalone function) +# --------------------------------------------------------------------------- + + +class TestCompileSearchQuery: + def test_empty_search_expressions(self) -> None: + conv = FakeConversation() + query = SearchQuery(search_expressions=[]) + result = compile_search_query(conv, query) + assert result == [] + + def test_single_search_terms_filter(self) -> None: + conv = FakeConversation() + query = make_query([make_filter(search_terms=["robots", "AI"])]) + result = compile_search_query(conv, query) + assert len(result) == 1 + expr = result[0] + assert len(expr.select_expressions) == 1 + terms_in_group = expr.select_expressions[0].search_term_group.terms + assert any( + isinstance(t, SearchTerm) and t.term.text == "robots" + for t in terms_in_group + ) + + def test_entity_filter_produces_expr(self) -> None: + conv = FakeConversation() + query = make_query([make_filter(entities=[make_entity("Alice", ["person"])])]) + result = compile_search_query(conv, query) + assert len(result) == 1 + + def test_multiple_filters_produce_multiple_select_exprs(self) -> None: + conv = FakeConversation() + filter1 = make_filter(search_terms=["alpha"]) + filter2 = make_filter(search_terms=["beta"]) + expr = SearchExpr(rewritten_query="test", filters=[filter1, filter2]) + query = SearchQuery(search_expressions=[expr]) + result = compile_search_query(conv, query) + assert len(result) == 1 + assert len(result[0].select_expressions) == 2 + + def test_raw_query_preserved(self) -> None: + conv = FakeConversation() + query = make_query([make_filter(search_terms=["foo"])]) + query.search_expressions[0].rewritten_query = "my rewritten query" + result = compile_search_query(conv, query) + assert result[0].raw_query == "my rewritten query" + + +# --------------------------------------------------------------------------- +# compile_search_filter 
(standalone function) +# --------------------------------------------------------------------------- + + +class TestCompileSearchFilter: + def test_entity_filter(self) -> None: + conv = FakeConversation() + f = make_filter(entities=[make_entity("Bob")]) + result = compile_search_filter(conv, f) + assert result.search_term_group is not None + + def test_search_terms_filter(self) -> None: + conv = FakeConversation() + f = make_filter(search_terms=["climate", "change"]) + result = compile_search_filter(conv, f) + terms = result.search_term_group.terms + assert len(terms) == 2 + + def test_empty_filter_uses_topic_wildcard(self) -> None: + """A filter with no entity, action, or search_terms should produce a topic:* term.""" + conv = FakeConversation() + f = SearchFilter() + result = compile_search_filter(conv, f) + # Should produce a single topic:* property search term + terms = result.search_term_group.terms + assert len(terms) == 1 + + def test_time_range_produces_when(self) -> None: + conv = FakeConversation() + dtr = DateTimeRange( + start_date=DateTime(date=DateVal(day=1, month=1, year=2024)) + ) + f = make_filter(search_terms=["foo"], time_range=dtr) + result = compile_search_filter(conv, f) + assert result.when is not None + assert result.when.date_range is not None + + def test_no_time_range_when_is_none(self) -> None: + conv = FakeConversation() + f = make_filter(search_terms=["foo"]) + result = compile_search_filter(conv, f) + assert result.when is None + + +# --------------------------------------------------------------------------- +# SearchQueryCompiler — compile_term_group and related +# --------------------------------------------------------------------------- + + +class TestSearchQueryCompilerTermGroup: + def test_search_terms_added(self) -> None: + compiler = make_compiler() + f = make_filter(search_terms=["hello", "world"]) + group = compiler.compile_term_group(f) + texts = [t.term.text for t in group.terms if isinstance(t, SearchTerm)] + assert 
"hello" in texts + assert "world" in texts + + def test_entity_name_added_as_property_term(self) -> None: + compiler = make_compiler() + f = make_filter(entities=[make_entity("Ada")]) + group = compiler.compile_term_group(f) + # Should have at least one term + assert len(group.terms) > 0 + + def test_empty_entity_name_ignored(self) -> None: + compiler = make_compiler() + f = make_filter(entities=[make_entity("")]) + group = compiler.compile_term_group(f) + # Empty string is not searchable; fallback to topic:* for empty term group + # (there are topic terms added for entity_terms in compile_entity_terms) + # We just check no crash and group is returned + assert group is not None + + def test_star_entity_name_ignored(self) -> None: + compiler = make_compiler() + f = make_filter(entities=[make_entity("*")]) + group = compiler.compile_term_group(f) + assert group is not None + + def test_noise_term_ignored(self) -> None: + compiler = make_compiler() + f = make_filter(search_terms=["thing", "object", "hello"]) + group = compiler.compile_term_group(f) + texts = [t.term.text for t in group.terms if isinstance(t, SearchTerm)] + # noise terms filtered from property groups but not from search_terms path + # search_terms path does NOT call add_property_term_to_group + assert "hello" in texts + + def test_custom_term_filter_excludes_property_terms(self) -> None: + # term_filter applies to add_property_term_to_group, not compile_search_terms. 
+ options = LanguageQueryCompileOptions(term_filter=lambda t: t != "excluded") + compiler = make_compiler(options=options) + group = SearchTermGroup(boolean_op="or", terms=[]) + compiler.add_property_term_to_group("name", "excluded", group) + compiler.add_property_term_to_group("name", "included", group) + assert len(group.terms) == 1 + + +# --------------------------------------------------------------------------- +# SearchQueryCompiler — entity terms with facets +# --------------------------------------------------------------------------- + + +class TestEntityTermsWithFacets: + def test_entity_with_type(self) -> None: + compiler = make_compiler() + entity = make_entity("Alice", types=["person"]) + f = make_filter(entities=[entity]) + group = compiler.compile_term_group(f) + assert len(group.terms) > 0 + + def test_entity_with_facet_name_and_value(self) -> None: + compiler = make_compiler() + facet = FacetTerm(facet_name="profession", facet_value="writer") + entity = make_entity("Bob", facets=[facet]) + f = make_filter(entities=[entity]) + group = compiler.compile_term_group(f) + assert len(group.terms) > 0 + + def test_entity_with_wildcard_facet_value(self) -> None: + compiler = make_compiler() + facet = FacetTerm(facet_name="profession", facet_value="*") + entity = make_entity("Bob", facets=[facet]) + f = make_filter(entities=[entity]) + group = compiler.compile_term_group(f) + assert len(group.terms) > 0 + + def test_entity_with_wildcard_facet_name(self) -> None: + compiler = make_compiler() + facet = FacetTerm(facet_name="*", facet_value="writer") + entity = make_entity("Bob", facets=[facet]) + f = make_filter(entities=[entity]) + group = compiler.compile_term_group(f) + assert len(group.terms) > 0 + + def test_entity_with_both_wildcards_no_facet_term(self) -> None: + compiler = make_compiler() + facet = FacetTerm(facet_name="*", facet_value="*") + entity = make_entity("Bob", facets=[facet]) + f = make_filter(entities=[entity]) + group = 
compiler.compile_term_group(f) + # Both wildcards => no facet term added, but entity name term (or_max) + # and topic term for "Bob" are still generated — 2 terms total. + assert len(group.terms) == 2 + + def test_pronoun_entity_skipped(self) -> None: + compiler = make_compiler() + pronoun = make_entity("it", is_pronoun=True) + normal = make_entity("Alice") + f = make_filter(entities=[pronoun, normal]) + group = compiler.compile_term_group(f) + # Only Alice's term should be added + assert len(group.terms) > 0 + + +# --------------------------------------------------------------------------- +# SearchQueryCompiler — action terms +# --------------------------------------------------------------------------- + + +class TestActionTerms: + def test_action_with_verbs_adds_verb_terms(self) -> None: + compiler = make_compiler() + actor = make_entity("Alice") + action = make_action(actor=[actor], verbs=["sent", "emailed"]) + f = make_filter(action=action) + group = compiler.compile_term_group(f) + assert len(group.terms) > 0 + + def test_action_with_target_entities(self) -> None: + compiler = make_compiler() + actor = make_entity("Alice") + target = make_entity("Bob") + action = make_action(actor=[actor], verbs=["sent"], targets=[target]) + f = make_filter(action=action) + group = compiler.compile_term_group(f) + assert len(group.terms) > 0 + + def test_action_with_additional_entities(self) -> None: + compiler = make_compiler() + actor = make_entity("Alice") + extra = make_entity("Charlie") + action = make_action(actor=[actor], verbs=["spoke"], additional=[extra]) + f = make_filter(action=action) + group = compiler.compile_term_group(f) + assert len(group.terms) > 0 + + def test_action_star_actor_no_scope(self) -> None: + """When actor_entities is '*', scope is not applied.""" + action = make_action(actor="*", verbs=["played"]) + f = make_filter(action=action) + result = compile_search_filter(FakeConversation(), f) + # should have no scope (when is None or 
when.scope_defining_terms is empty) + when = result.when + assert when is None or ( + when.scope_defining_terms is None + or len(when.scope_defining_terms.terms) == 0 + ) + + +# --------------------------------------------------------------------------- +# SearchQueryCompiler — compile_when with scope +# --------------------------------------------------------------------------- + + +class TestCompileWhen: + def test_no_action_no_when(self) -> None: + compiler = make_compiler() + f = make_filter(search_terms=["foo"]) + when = compiler.compile_when(f) + assert when is None + + def test_time_range_produces_date_range(self) -> None: + compiler = make_compiler() + dtr = DateTimeRange( + start_date=DateTime(date=DateVal(day=1, month=3, year=2025)), + stop_date=DateTime(date=DateVal(day=31, month=3, year=2025)), + ) + f = make_filter(search_terms=["foo"], time_range=dtr) + when = compiler.compile_when(f) + assert when is not None + assert when.date_range is not None + assert when.date_range.start.month == 3 + + def test_informational_action_no_scope(self) -> None: + compiler = make_compiler() + actor = make_entity("Alice") + action = make_action(actor=[actor], verbs=["spoke"], is_informational=True) + f = make_filter(action=action) + when = compiler.compile_when(f) + # is_informational = True → should_add_scope returns False → no scope in when + assert when is None or ( + when.scope_defining_terms is None + or len(when.scope_defining_terms.terms) == 0 + ) + + def test_actor_entities_list_adds_scope(self) -> None: + compiler = make_compiler() + actor = make_entity("Alice") + action = make_action(actor=[actor], verbs=["sent"]) + f = make_filter(action=action) + when = compiler.compile_when(f) + assert when is not None + assert when.scope_defining_terms is not None + assert len(when.scope_defining_terms.terms) > 0 + + +# --------------------------------------------------------------------------- +# SearchQueryCompiler — compile_search_terms +# 
--------------------------------------------------------------------------- + + +class TestCompileSearchTerms: + def test_returns_search_term_group(self) -> None: + compiler = make_compiler() + group = compiler.compile_search_terms(["alpha", "beta"]) + texts = [t.term.text for t in group.terms if isinstance(t, SearchTerm)] + assert "alpha" in texts + assert "beta" in texts + + def test_appends_to_existing_group(self) -> None: + compiler = make_compiler() + existing = SearchTermGroup(boolean_op="or", terms=[]) + compiler.compile_search_terms(["gamma"], existing) + texts = [t.term.text for t in existing.terms if isinstance(t, SearchTerm)] + assert "gamma" in texts + + +# --------------------------------------------------------------------------- +# SearchQueryCompiler — is_searchable_string / is_noise_term +# --------------------------------------------------------------------------- + + +class TestIsSearchableString: + def test_normal_string_is_searchable(self) -> None: + compiler = make_compiler() + assert compiler.is_searchable_string("hello") is True + + def test_empty_string_not_searchable(self) -> None: + compiler = make_compiler() + assert compiler.is_searchable_string("") is False + + def test_star_not_searchable(self) -> None: + compiler = make_compiler() + assert compiler.is_searchable_string("*") is False + + def test_term_filter_respected(self) -> None: + options = LanguageQueryCompileOptions(term_filter=lambda t: t != "skip") + compiler = make_compiler(options=options) + assert compiler.is_searchable_string("skip") is False + assert compiler.is_searchable_string("keep") is True + + +class TestIsNoiseTerm: + def test_noise_words(self) -> None: + compiler = make_compiler() + for word in ("thing", "object", "concept", "idea", "entity"): + assert compiler.is_noise_term(word) is True + + def test_non_noise_word(self) -> None: + compiler = make_compiler() + assert compiler.is_noise_term("robot") is False + + def test_case_insensitive(self) -> None: + compiler 
= make_compiler() + assert compiler.is_noise_term("THING") is True + + +# --------------------------------------------------------------------------- +# SearchQueryCompiler — deduplication +# --------------------------------------------------------------------------- + + +class TestDeduplication: + def test_duplicate_property_term_not_added_twice(self) -> None: + compiler = make_compiler() + group = SearchTermGroup(boolean_op="or", terms=[]) + compiler.add_property_term_to_group("name", "Alice", group) + compiler.add_property_term_to_group("name", "Alice", group) + assert len(group.terms) == 1 + + def test_different_property_names_both_added(self) -> None: + compiler = make_compiler() + group = SearchTermGroup(boolean_op="or", terms=[]) + compiler.add_property_term_to_group("name", "Alice", group) + compiler.add_property_term_to_group("topic", "Alice", group) + assert len(group.terms) == 2 + + def test_dedupe_disabled_allows_duplicates(self) -> None: + compiler = make_compiler() + compiler.dedupe = False + group = SearchTermGroup(boolean_op="or", terms=[]) + compiler.add_property_term_to_group("name", "Alice", group) + compiler.add_property_term_to_group("name", "Alice", group) + assert len(group.terms) == 2 + + +# --------------------------------------------------------------------------- +# _compile_fallback_query +# --------------------------------------------------------------------------- + + +class TestCompileFallbackQuery: + def test_exact_scope_no_fallback(self) -> None: + conv = FakeConversation() + options = LanguageQueryCompileOptions(exact_scope=True, verb_scope=True) + query = make_query([make_filter(search_terms=["foo"])]) + result = _compile_fallback_query(conv, query, options) + assert result is None + + def test_no_verb_scope_no_fallback(self) -> None: + conv = FakeConversation() + options = LanguageQueryCompileOptions(exact_scope=False, verb_scope=False) + query = make_query([make_filter(search_terms=["foo"])]) + result = 
_compile_fallback_query(conv, query, options) + assert result is None + + def test_verb_scope_and_not_exact_produces_fallback(self) -> None: + conv = FakeConversation() + options = LanguageQueryCompileOptions(exact_scope=False, verb_scope=True) + query = make_query([make_filter(search_terms=["foo"])]) + result = _compile_fallback_query(conv, query, options) + # Should return a list of SearchQueryExpr (fallback without verb matching) + assert result is not None + assert isinstance(result, list) + assert len(result) == 1 + + +# --------------------------------------------------------------------------- +# SearchQueryCompiler — compile_action_term_as_search_terms (use_or_max=False) +# --------------------------------------------------------------------------- + + +class TestCompileActionTermAsSearchTerms: + def test_no_verbs_no_actor_empty_group(self) -> None: + compiler = make_compiler() + action = ActionTerm( + actor_entities="*", + is_informational=False, + ) + group = compiler.compile_action_term_as_search_terms(action, use_or_max=False) + # actor is "*" so no actor entities; no verbs; result depends on implementation + assert group is not None + + def test_use_or_max_false_merges_into_same_group(self) -> None: + compiler = make_compiler() + actor = make_entity("Alice") + action = make_action(actor=[actor], verbs=["sent"]) + group = compiler.compile_action_term_as_search_terms(action, use_or_max=False) + assert len(group.terms) > 0 + + def test_empty_or_max_not_appended(self) -> None: + """With use_or_max=True but no verbs/actors, or_max wrapper should not be appended.""" + compiler = make_compiler() + action = ActionTerm( + actor_entities="*", + is_informational=False, + ) + outer = SearchTermGroup(boolean_op="or", terms=[]) + compiler.compile_action_term_as_search_terms(action, outer, use_or_max=True) + # or_max only appended if non-empty + assert len(outer.terms) == 0 diff --git a/tests/test_secindex.py b/tests/test_secindex.py index a9008aa3..39665b05 100644 
--- a/tests/test_secindex.py +++ b/tests/test_secindex.py @@ -3,7 +3,7 @@ import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import ( ConversationSettings, @@ -29,9 +29,9 @@ def simple_conversation() -> FakeConversation: @pytest.fixture def conversation_settings(needs_auth: None) -> ConversationSettings: - from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME + from typeagent.aitools.model_adapters import create_test_embedding_model - model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + model = create_test_embedding_model() return ConversationSettings(model) @@ -41,7 +41,7 @@ def test_conversation_secondary_indexes_initialization( """Test initialization of ConversationSecondaryIndexes.""" storage_provider = memory_storage # Create proper settings for testing - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(test_model) settings = RelatedTermIndexSettings(embedding_settings) indexes = ConversationSecondaryIndexes(storage_provider, settings) diff --git a/tests/test_secindex_storage_integration.py b/tests/test_secindex_storage_integration.py index a050771b..15738bb6 100644 --- a/tests/test_secindex_storage_integration.py +++ b/tests/test_secindex_storage_integration.py @@ -4,7 +4,7 @@ # Test that ConversationSecondaryIndexes now uses storage provider properly import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel, TEST_MODEL_NAME +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import RelatedTermIndexSettings from typeagent.knowpro.secindex import 
ConversationSecondaryIndexes @@ -19,7 +19,7 @@ async def test_secondary_indexes_use_storage_provider( storage_provider = memory_storage # Create test settings - test_model = AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + test_model = create_test_embedding_model() embedding_settings = TextEmbeddingIndexSettings(test_model) related_terms_settings = RelatedTermIndexSettings(embedding_settings) diff --git a/tests/test_semrefindex.py b/tests/test_semrefindex.py index 5fbff6cc..5f580992 100644 --- a/tests/test_semrefindex.py +++ b/tests/test_semrefindex.py @@ -8,7 +8,7 @@ import pytest_asyncio # TypeAgent imports -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import IEmbeddingModel from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, @@ -20,14 +20,19 @@ ITermToSemanticRefIndex, Topic, ) -from typeagent.knowpro.kplib import Action, ConcreteEntity, Facet, KnowledgeResponse +from typeagent.knowpro.knowledge_schema import ( + Action, + ConcreteEntity, + Facet, + KnowledgeResponse, +) from typeagent.storage import SqliteStorageProvider from typeagent.storage.memory import MemoryStorageProvider from typeagent.storage.memory.semrefindex import ( - add_action_to_index, - add_entity_to_index, + add_action, + add_entity, add_knowledge_to_index, - add_topic_to_index, + add_topic, TermToSemanticRefIndex, ) @@ -37,7 +42,7 @@ @pytest_asyncio.fixture(params=["memory", "sqlite"]) async def semantic_ref_index( request: pytest.FixtureRequest, - embedding_model: AsyncEmbeddingModel, + embedding_model: IEmbeddingModel, temp_db_path: str, ) -> AsyncGenerator[ITermToSemanticRefIndex, None]: """Unified fixture to create a semantic ref index for both memory and SQLite providers.""" @@ -97,7 +102,7 @@ def get_knowledge(self): @pytest_asyncio.fixture(params=["memory", "sqlite"]) async def semantic_ref_setup( request: pytest.FixtureRequest, - embedding_model: 
AsyncEmbeddingModel, + embedding_model: IEmbeddingModel, temp_db_path: str, ) -> AsyncGenerator[Dict[str, ITermToSemanticRefIndex | ISemanticRefCollection], None]: """Unified fixture that provides both semantic ref index and collection for testing helper functions.""" @@ -211,7 +216,7 @@ async def test_semantic_ref_index_serialize_and_deserialize( @pytest.mark.asyncio -async def test_add_entity_to_index( +async def test_add_entity( semantic_ref_setup: Dict[str, ITermToSemanticRefIndex | ISemanticRefCollection], needs_auth: None, ) -> None: @@ -224,7 +229,7 @@ async def test_add_entity_to_index( type=["object", "example"], facets=[Facet(name="color", value="blue")], ) - await add_entity_to_index(entity, semantic_refs, semantic_ref_index, 0) + await add_entity(entity, semantic_refs, semantic_ref_index, 0) assert await semantic_refs.size() == 1 assert (await semantic_refs.get_item(0)).knowledge.knowledge_type == "entity" @@ -247,7 +252,7 @@ async def test_add_entity_to_index( @pytest.mark.asyncio -async def test_add_topic_to_index( +async def test_add_topic( semantic_ref_setup: Dict[str, ITermToSemanticRefIndex | ISemanticRefCollection], needs_auth: None, ) -> None: @@ -256,7 +261,7 @@ async def test_add_topic_to_index( semantic_refs: ISemanticRefCollection = semantic_ref_setup["collection"] # type: ignore topic = "ExampleTopic" - await add_topic_to_index(topic, semantic_refs, semantic_ref_index, 0) + await add_topic(Topic(text=topic), semantic_refs, semantic_ref_index, 0) assert await semantic_refs.size() == 1 assert (await semantic_refs.get_item(0)).knowledge.knowledge_type == "topic" @@ -270,7 +275,7 @@ async def test_add_topic_to_index( @pytest.mark.asyncio -async def test_add_action_to_index( +async def test_add_action( semantic_ref_setup: Dict[str, ITermToSemanticRefIndex | ISemanticRefCollection], needs_auth: None, ) -> None: @@ -287,7 +292,7 @@ async def test_add_action_to_index( params=None, subject_entity_facet=None, ) - await add_action_to_index(action, 
semantic_refs, semantic_ref_index, 0) + await add_action(action, semantic_refs, semantic_ref_index, 0) assert await semantic_refs.size() == 1 assert (await semantic_refs.get_item(0)).knowledge.knowledge_type == "action" diff --git a/tests/test_serialization.py b/tests/test_serialization.py index 92aa71d9..d32b9526 100644 --- a/tests/test_serialization.py +++ b/tests/test_serialization.py @@ -10,15 +10,22 @@ from typeagent.knowpro.interfaces import ( ConversationDataWithIndexes, MessageTextIndexData, + Tag, TermsToRelatedTermsIndexData, TextToTextLocationIndexData, + Topic, ) -from typeagent.knowpro.kplib import ConcreteEntity, Quantity +from typeagent.knowpro.knowledge_schema import ConcreteEntity, Quantity from typeagent.knowpro.serialization import ( + ConversationBinaryData, + ConversationFileData, + ConversationJsonData, create_file_header, DeserializationError, + deserialize_knowledge, deserialize_object, from_conversation_file_data, + is_primitive, serialize_embeddings, serialize_object, to_conversation_file_data, @@ -113,7 +120,7 @@ def test_write_and_read_conversation_data( # Read back the data read_data = Podcast._read_conversation_data_from_file( - str(filename), embedding_size=2 + str(filename), ) assert read_data is not None assert read_data.get("relatedTermsIndexData") is not None @@ -133,3 +140,136 @@ def test_deserialization_error(): """Test that DeserializationError is raised for invalid data.""" with pytest.raises(DeserializationError, match="Pydantic validation failed"): deserialize_object(Quantity, {"invalid_key": "value"}) + + +# --------------------------------------------------------------------------- +# Additional tests for broader coverage +# --------------------------------------------------------------------------- + + +def test_from_conversation_file_data_missing_header_raises(): + """from_conversation_file_data raises when fileHeader is absent.""" + json_data: ConversationJsonData[Any] = ConversationJsonData( + nameTag="x", messages=[], 
tags=[], semanticRefs=None + ) + file_data: ConversationFileData[Any] = ConversationFileData( + jsonData=json_data, + binaryData=ConversationBinaryData(embeddingsList=[]), + ) + with pytest.raises(DeserializationError, match="Missing file header"): + from_conversation_file_data(file_data) + + +def test_from_conversation_file_data_bad_version_raises(): + """from_conversation_file_data raises on unsupported version.""" + json_data: ConversationJsonData[Any] = ConversationJsonData( + nameTag="x", + messages=[], + tags=[], + semanticRefs=None, + fileHeader={"version": "99.9"}, + embeddingFileHeader={}, + ) + file_data: ConversationFileData[Any] = ConversationFileData( + jsonData=json_data, + binaryData=ConversationBinaryData(embeddingsList=[]), + ) + with pytest.raises(DeserializationError, match="Unsupported file version"): + from_conversation_file_data(file_data) + + +def test_from_conversation_file_data_missing_embedding_header_raises(): + """from_conversation_file_data raises when embeddingFileHeader is absent.""" + json_data: ConversationJsonData[Any] = ConversationJsonData( + nameTag="x", + messages=[], + tags=[], + semanticRefs=None, + fileHeader={"version": "0.1"}, + ) + file_data: ConversationFileData[Any] = ConversationFileData( + jsonData=json_data, + binaryData=ConversationBinaryData(embeddingsList=[]), + ) + with pytest.raises(DeserializationError, match="Missing embedding file header"): + from_conversation_file_data(file_data) + + +def test_from_conversation_file_data_missing_embeddings_list_raises(): + """from_conversation_file_data raises when embeddingsList is None.""" + json_data: ConversationJsonData[Any] = ConversationJsonData( + nameTag="x", + messages=[], + tags=[], + semanticRefs=None, + fileHeader={"version": "0.1"}, + embeddingFileHeader={}, + ) + file_data: ConversationFileData[Any] = ConversationFileData( + jsonData=json_data, + binaryData=ConversationBinaryData(embeddingsList=None), + ) + with pytest.raises(DeserializationError, 
match="Missing embeddings list"): + from_conversation_file_data(file_data) + + +def test_from_conversation_file_data_success_empty(): + """from_conversation_file_data succeeds with minimal valid data.""" + emb = np.zeros((0, 4), dtype=np.float32) + json_data: ConversationJsonData[Any] = ConversationJsonData( + nameTag="test", + messages=[], + tags=[], + semanticRefs=None, + fileHeader={"version": "0.1"}, + embeddingFileHeader={}, + ) + file_data: ConversationFileData[Any] = ConversationFileData( + jsonData=json_data, + binaryData=ConversationBinaryData(embeddingsList=[emb]), + ) + result = from_conversation_file_data(file_data) + assert result["nameTag"] == "test" + + +def test_is_primitive(): + """Test is_primitive classification.""" + for t in (int, float, bool, str, type(None)): + assert is_primitive(t), f"Expected {t} to be primitive" + assert not is_primitive(list) + assert not is_primitive(dict) + + +def test_deserialize_object_union_none(): + """deserialize_object handles optional (X | None) type with None input.""" + result = deserialize_object(int | None, None) + assert result is None + + +def test_deserialize_object_list_of_int(): + """deserialize_object can deserialize a list of ints.""" + result = deserialize_object(list[int], [1, 2, 3]) + assert result == [1, 2, 3] + + +def test_deserialize_knowledge_entity(): + """deserialize_knowledge reconstructs a ConcreteEntity.""" + obj = {"name": "Bob", "type": ["person"]} + result = deserialize_knowledge("entity", obj) + assert isinstance(result, ConcreteEntity) + assert result.name == "Bob" + + +def test_deserialize_knowledge_topic(): + """deserialize_knowledge reconstructs a Topic.""" + obj = {"text": "AI ethics"} + result = deserialize_knowledge("topic", obj) + assert isinstance(result, Topic) + assert result.text == "AI ethics" + + +def test_deserialize_knowledge_tag(): + """deserialize_knowledge reconstructs a Tag.""" + obj = {"text": "important"} + result = deserialize_knowledge("tag", obj) + assert 
isinstance(result, Tag) diff --git a/tests/test_source_id_ingestion.py b/tests/test_source_id_ingestion.py new file mode 100644 index 00000000..1886a7b7 --- /dev/null +++ b/tests/test_source_id_ingestion.py @@ -0,0 +1,159 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Tests for source_id-based ingestion tracking in add_messages_with_indexing.""" + +import os +import tempfile + +import pytest + +from typeagent.aitools.model_adapters import create_test_embedding_model +from typeagent.knowpro.convsettings import ConversationSettings +from typeagent.storage.sqlite.provider import SqliteStorageProvider +from typeagent.transcripts.transcript import ( + Transcript, + TranscriptMessage, + TranscriptMessageMeta, +) + + +def _make_message( + text: str, speaker: str = "Alice", source_id: str | None = None +) -> TranscriptMessage: + return TranscriptMessage( + text_chunks=[text], + metadata=TranscriptMessageMeta(speaker=speaker), + tags=["test"], + source_id=source_id, + ) + + +async def _create_transcript( + db_path: str, +) -> tuple[Transcript, SqliteStorageProvider]: + model = create_test_embedding_model() + settings = ConversationSettings(model=model) + settings.semantic_ref_index_settings.auto_extract_knowledge = False + storage = SqliteStorageProvider( + db_path, + message_type=TranscriptMessage, + message_text_index_settings=settings.message_text_index_settings, + related_term_index_settings=settings.related_term_index_settings, + ) + settings.storage_provider = storage + transcript = await Transcript.create(settings, name="test") + return transcript, storage + + +def _ingested_count(storage: SqliteStorageProvider) -> int: + """Count rows in IngestedSources table.""" + cursor = storage.db.cursor() + cursor.execute("SELECT COUNT(*) FROM IngestedSources") + return cursor.fetchone()[0] + + +@pytest.mark.asyncio +async def test_explicit_source_ids_marks_ingested() -> None: + """Passing source_ids= explicitly marks those IDs as 
ingested.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [_make_message("Hello"), _make_message("World")] + await transcript.add_messages_with_indexing(msgs, source_ids=["src-1", "src-2"]) + + assert await storage.is_source_ingested("src-1") + assert await storage.is_source_ingested("src-2") + assert not await storage.is_source_ingested("src-3") + + await storage.close() + + +@pytest.mark.asyncio +async def test_message_source_id_marks_ingested() -> None: + """When source_ids is omitted, message.source_id is used.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [ + _make_message("Hello", source_id="msg-src-1"), + _make_message("World", source_id="msg-src-2"), + ] + await transcript.add_messages_with_indexing(msgs) + + assert await storage.is_source_ingested("msg-src-1") + assert await storage.is_source_ingested("msg-src-2") + + await storage.close() + + +@pytest.mark.asyncio +async def test_message_source_id_none_skipped() -> None: + """Messages with source_id=None are silently skipped (no ingestion mark).""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [ + _make_message("Hello", source_id="only-one"), + _make_message("World"), # source_id=None + ] + await transcript.add_messages_with_indexing(msgs) + + assert await storage.is_source_ingested("only-one") + # The second message had no source_id, so nothing extra was marked + assert await storage.get_source_status("only-one") == "ingested" + assert _ingested_count(storage) == 1 + + await storage.close() + + +@pytest.mark.asyncio +async def test_explicit_source_ids_overrides_message_source_id() -> None: + """Passing source_ids= takes precedence; message.source_id is 
ignored.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [ + _make_message("Hello", source_id="msg-level"), + ] + await transcript.add_messages_with_indexing(msgs, source_ids=["explicit-id"]) + + assert await storage.is_source_ingested("explicit-id") + assert not await storage.is_source_ingested("msg-level") + + await storage.close() + + +@pytest.mark.asyncio +async def test_source_ids_length_mismatch_raises() -> None: + """Passing source_ids with wrong length raises ValueError.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [_make_message("Hello"), _make_message("World")] + with pytest.raises(ValueError, match="Length of source_ids"): + await transcript.add_messages_with_indexing(msgs, source_ids=["only-one"]) + + await storage.close() + + +@pytest.mark.asyncio +async def test_no_source_ids_no_message_source_id() -> None: + """When neither source_ids nor message.source_id is set, nothing is marked.""" + with tempfile.TemporaryDirectory() as tmpdir: + db_path = os.path.join(tmpdir, "test.db") + transcript, storage = await _create_transcript(db_path) + + msgs = [_make_message("Hello"), _make_message("World")] + result = await transcript.add_messages_with_indexing(msgs) + + assert result.messages_added == 2 + # No source tracking happened + assert _ingested_count(storage) == 0 + + await storage.close() diff --git a/tests/test_sqlite_indexes.py b/tests/test_sqlite_indexes.py index 6cfe210d..825f57d8 100644 --- a/tests/test_sqlite_indexes.py +++ b/tests/test_sqlite_indexes.py @@ -10,7 +10,7 @@ import pytest -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import IEmbeddingModel from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro import interfaces from 
typeagent.knowpro.convsettings import MessageTextIndexSettings @@ -35,7 +35,7 @@ @pytest.fixture def embedding_settings( - embedding_model: AsyncEmbeddingModel, + embedding_model: IEmbeddingModel, ) -> TextEmbeddingIndexSettings: """Create TextEmbeddingIndexSettings for testing.""" return TextEmbeddingIndexSettings(embedding_model) @@ -157,18 +157,14 @@ async def test_timestamp_operations(self, sqlite_db: sqlite3.Connection): # First, we need to create some messages in the database for the timestamps to reference cursor = sqlite_db.cursor() - cursor.execute( - """ + cursor.execute(""" INSERT INTO Messages (msg_id, chunks, start_timestamp) VALUES (1, '["test message 1"]', NULL) - """ - ) - cursor.execute( - """ + """) + cursor.execute(""" INSERT INTO Messages (msg_id, chunks, start_timestamp) VALUES (2, '["test message 2"]', NULL) - """ - ) + """) sqlite_db.commit() # Add timestamps to existing messages diff --git a/tests/test_sqlitestore.py b/tests/test_sqlitestore.py index ec9a8612..27a522d0 100644 --- a/tests/test_sqlitestore.py +++ b/tests/test_sqlitestore.py @@ -3,13 +3,14 @@ from collections.abc import AsyncGenerator from dataclasses import field +from datetime import datetime import pytest import pytest_asyncio from pydantic.dataclasses import dataclass -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import IEmbeddingModel from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, @@ -22,7 +23,7 @@ TextRange, Topic, ) -from typeagent.knowpro.kplib import KnowledgeResponse +from typeagent.knowpro.knowledge_schema import KnowledgeResponse from typeagent.storage import SqliteStorageProvider @@ -39,7 +40,7 @@ def get_knowledge(self) -> KnowledgeResponse: @pytest_asyncio.fixture async def dummy_sqlite_storage_provider( - temp_db_path: str, embedding_model: AsyncEmbeddingModel + temp_db_path: str, embedding_model: IEmbeddingModel ) -> 
AsyncGenerator[SqliteStorageProvider[DummyMessage], None]: """Create a SqliteStorageProvider for testing.""" embedding_settings = TextEmbeddingIndexSettings(embedding_model) @@ -128,8 +129,6 @@ async def test_sqlite_timestamp_index( dummy_sqlite_storage_provider: SqliteStorageProvider[DummyMessage], ): """Test SqliteTimestampToTextRangeIndex functionality.""" - from datetime import datetime - from typeagent.knowpro.interfaces import DateRange # Set up database with some messages @@ -189,3 +188,18 @@ async def test_sqlite_timestamp_index( empty_range = DateRange(start=empty_start, end=empty_end) empty_results = await timestamp_index.lookup_range(empty_range) assert len(empty_results) == 0 + + +@pytest.mark.asyncio +async def test_sqlite_nested_transaction_error( + dummy_sqlite_storage_provider: SqliteStorageProvider[DummyMessage], +): + """Verify that nested transactions are handled gracefully with a clear error.""" + provider = dummy_sqlite_storage_provider + + # First transaction should work + async with provider: + # Try to start a nested transaction - should raise a clear RuntimeError + with pytest.raises(RuntimeError, match="Cannot start a new transaction"): + async with provider: + pass diff --git a/tests/test_storage_providers_unified.py b/tests/test_storage_providers_unified.py index 67ae9d7c..179b1a7b 100644 --- a/tests/test_storage_providers_unified.py +++ b/tests/test_storage_providers_unified.py @@ -9,6 +9,8 @@ """ from dataclasses import field +import os +import tempfile from typing import assert_never, AsyncGenerator import pytest @@ -16,9 +18,9 @@ from pydantic.dataclasses import dataclass -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import IEmbeddingModel from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings -from typeagent.knowpro import kplib +from typeagent.knowpro import knowledge_schema as kplib from typeagent.knowpro.convsettings import ( MessageTextIndexSettings, 
RelatedTermIndexSettings, @@ -34,7 +36,7 @@ TextRange, Topic, ) -from typeagent.knowpro.kplib import KnowledgeResponse +from typeagent.knowpro.knowledge_schema import KnowledgeResponse from typeagent.storage import SqliteStorageProvider from typeagent.storage.memory import MemoryStorageProvider @@ -52,7 +54,7 @@ def get_knowledge(self) -> KnowledgeResponse: @pytest_asyncio.fixture(params=["memory", "sqlite"]) async def storage_provider_type( request: pytest.FixtureRequest, - embedding_model: AsyncEmbeddingModel, + embedding_model: IEmbeddingModel, temp_db_path: str, ) -> AsyncGenerator[tuple[IStorageProvider, str], None]: """Parameterized fixture that provides both memory and sqlite storage providers.""" @@ -328,7 +330,7 @@ async def test_conversation_threads_interface_parity( # Cross-provider validation tests @pytest.mark.asyncio async def test_cross_provider_message_collection_equivalence( - embedding_model: AsyncEmbeddingModel, temp_db_path: str, needs_auth: None + embedding_model: IEmbeddingModel, temp_db_path: str, needs_auth: None ): """Test that both providers handle message collections equivalently.""" # Create both providers with identical settings @@ -586,7 +588,7 @@ async def test_timestamp_index_with_data( @pytest.mark.asyncio async def test_storage_provider_independence( - embedding_model: AsyncEmbeddingModel, temp_db_path: str, needs_auth: None + embedding_model: IEmbeddingModel, temp_db_path: str, needs_auth: None ): """Test that different storage provider instances work independently.""" # Create settings shared between providers @@ -605,9 +607,6 @@ async def test_storage_provider_independence( ) # Create two sqlite providers (with different temp files) - import os - import tempfile - temp_file1 = tempfile.NamedTemporaryFile(suffix=".sqlite", delete=False) temp_path1 = temp_file1.name temp_file1.close() diff --git a/tests/test_textlocindex.py b/tests/test_textlocindex.py new file mode 100644 index 00000000..a9e6454f --- /dev/null +++ 
b/tests/test_textlocindex.py @@ -0,0 +1,146 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Tests for knowpro/textlocindex.py (TextToTextLocationIndex).""" + +import numpy as np +import pytest + +from typeagent.aitools.model_adapters import create_test_embedding_model +from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings +from typeagent.knowpro.interfaces import TextLocation, TextToTextLocationIndexData +from typeagent.knowpro.textlocindex import TextToTextLocationIndex + + +@pytest.fixture +def settings() -> TextEmbeddingIndexSettings: + return TextEmbeddingIndexSettings(create_test_embedding_model()) + + +@pytest.fixture +def index(settings: TextEmbeddingIndexSettings) -> TextToTextLocationIndex: + return TextToTextLocationIndex(settings) + + +# --------------------------------------------------------------------------- +# Empty index +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_empty_size(index: TextToTextLocationIndex) -> None: + assert await index.size() == 0 + + +@pytest.mark.asyncio +async def test_empty_is_empty(index: TextToTextLocationIndex) -> None: + assert await index.is_empty() + + +def test_get_out_of_range_returns_default(index: TextToTextLocationIndex) -> None: + assert index.get(0) is None + assert index.get(-1) is None + assert index.get(0, TextLocation(99)) == TextLocation(99) + + +# --------------------------------------------------------------------------- +# clear() +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_clear_resets(index: TextToTextLocationIndex) -> None: + loc = TextLocation(message_ordinal=0) + await index.add_text_location("hello world", loc) + assert await index.size() == 1 + index.clear() + assert await index.size() == 0 + assert await index.is_empty() + + +# 
--------------------------------------------------------------------------- +# serialize / deserialize round-trip (no real embeddings needed) +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_serialize_empty(index: TextToTextLocationIndex) -> None: + data = index.serialize() + assert data["textLocations"] == [] + # embeddings may be None or an empty ndarray + emb = data["embeddings"] + assert emb is None or (hasattr(emb, "shape") and emb.size == 0) + + +def test_deserialize_raises_on_no_embeddings( + index: TextToTextLocationIndex, +) -> None: + data: TextToTextLocationIndexData = { + "textLocations": [{"messageOrdinal": 0, "chunkOrdinal": 0}], + "embeddings": None, + } + with pytest.raises(ValueError, match="No embeddings found"): + index.deserialize(data) + + +def test_deserialize_raises_on_length_mismatch( + index: TextToTextLocationIndex, settings: TextEmbeddingIndexSettings +) -> None: + # The test embedding model uses size 3 by default. 
+ emb_size = 3 + fake_emb = np.zeros((3, emb_size), dtype=np.float32) + data: TextToTextLocationIndexData = { + # 2 locations but 3 embeddings → mismatch + "textLocations": [ + {"messageOrdinal": 0, "chunkOrdinal": 0}, + {"messageOrdinal": 1, "chunkOrdinal": 0}, + ], + "embeddings": fake_emb, + } + with pytest.raises(ValueError): + index.deserialize(data) + + +def test_deserialize_valid_data( + index: TextToTextLocationIndex, settings: TextEmbeddingIndexSettings +) -> None: + emb_size = 3 # default size for create_test_embedding_model() + n = 2 + fake_emb = np.zeros((n, emb_size), dtype=np.float32) + data: TextToTextLocationIndexData = { + "textLocations": [ + {"messageOrdinal": 0, "chunkOrdinal": 0}, + {"messageOrdinal": 1, "chunkOrdinal": 0}, + ], + "embeddings": fake_emb, + } + index.deserialize(data) + assert index.get(0) == TextLocation(0) + assert index.get(1) == TextLocation(1) + assert index.get(2) is None + + +# --------------------------------------------------------------------------- +# get() helper +# --------------------------------------------------------------------------- + + +def test_get_returns_correct_location( + index: TextToTextLocationIndex, settings: TextEmbeddingIndexSettings +) -> None: + emb_size = 3 # default size for create_test_embedding_model() + n = 3 + fake_emb = np.zeros((n, emb_size), dtype=np.float32) + data: TextToTextLocationIndexData = { + "textLocations": [ + {"messageOrdinal": 10, "chunkOrdinal": 0}, + {"messageOrdinal": 20, "chunkOrdinal": 1}, + {"messageOrdinal": 30, "chunkOrdinal": 0}, + ], + "embeddings": fake_emb, + } + index.deserialize(data) + assert index.get(0) == TextLocation(10, 0) + assert index.get(1) == TextLocation(20, 1) + assert index.get(2) == TextLocation(30, 0) + assert index.get(3) is None diff --git a/tests/test_transcripts.py b/tests/test_transcripts.py index 9d930034..e2354405 100644 --- a/tests/test_transcripts.py +++ b/tests/test_transcripts.py @@ -5,11 +5,19 @@ import os import pytest +import 
webvtt -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.embeddings import IEmbeddingModel +from typeagent.aitools.model_adapters import create_test_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.knowpro.universal_message import format_timestamp_utc, UNIX_EPOCH +from typeagent.storage.memory.collections import ( + MemoryMessageCollection, + MemorySemanticRefCollection, +) +from typeagent.storage.memory.semrefindex import TermToSemanticRefIndex from typeagent.transcripts.transcript import ( + split_speaker_name, Transcript, TranscriptMessage, TranscriptMessageMeta, @@ -18,6 +26,7 @@ extract_speaker_from_text, get_transcript_duration, get_transcript_speakers, + parse_voice_tags, webvtt_timestamp_to_seconds, ) @@ -88,7 +97,7 @@ def test_get_transcript_info(): @pytest.fixture def conversation_settings( - needs_auth: None, embedding_model: AsyncEmbeddingModel + needs_auth: None, embedding_model: IEmbeddingModel ) -> ConversationSettings: """Create conversation settings for testing.""" return ConversationSettings(embedding_model) @@ -101,15 +110,6 @@ def conversation_settings( @pytest.mark.asyncio async def test_ingest_vtt_transcript(conversation_settings: ConversationSettings): """Test importing a VTT file into a Transcript object.""" - import webvtt - - from typeagent.storage.memory.collections import ( - MemoryMessageCollection, - MemorySemanticRefCollection, - ) - from typeagent.storage.memory.semrefindex import TermToSemanticRefIndex - from typeagent.transcripts.transcript_ingest import parse_voice_tags - vtt_file = CONFUSE_A_CAT_VTT # Use in-memory storage to avoid database cleanup issues @@ -224,10 +224,8 @@ def test_transcript_message_creation(): @pytest.mark.asyncio async def test_transcript_creation(): """Test creating an empty transcript.""" - from typeagent.aitools.embeddings import TEST_MODEL_NAME - # Create a minimal transcript for testing structure - embedding_model = 
AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) + embedding_model = create_test_embedding_model() settings = ConversationSettings(embedding_model) transcript = await Transcript.create( @@ -242,7 +240,7 @@ async def test_transcript_creation(): @pytest.mark.asyncio async def test_transcript_knowledge_extraction_slow( - really_needs_auth: None, embedding_model: AsyncEmbeddingModel + really_needs_auth: None, embedding_model: IEmbeddingModel ): """ Test that knowledge extraction works during transcript ingestion. @@ -254,14 +252,6 @@ async def test_transcript_knowledge_extraction_slow( 4. Verifies both mechanical extraction (entities/actions from metadata) and LLM extraction (topics from content) work correctly """ - import webvtt - - from typeagent.storage.memory.collections import ( - MemoryMessageCollection, - MemorySemanticRefCollection, - ) - from typeagent.storage.memory.semrefindex import TermToSemanticRefIndex - # Use in-memory storage for speed settings = ConversationSettings(embedding_model) @@ -315,7 +305,7 @@ async def test_transcript_knowledge_extraction_slow( # Enable knowledge extraction settings.semantic_ref_index_settings.auto_extract_knowledge = True - settings.semantic_ref_index_settings.batch_size = 10 + settings.semantic_ref_index_settings.concurrency = 10 # Add messages with indexing (this should extract knowledge) result = await transcript.add_messages_with_indexing(messages_list) @@ -349,3 +339,207 @@ async def test_transcript_knowledge_extraction_slow( ) print(f"Knowledge types: {knowledge_types}") print(f"Indexed terms: {len(terms)}") + + +# --------------------------------------------------------------------------- +# split_speaker_name +# --------------------------------------------------------------------------- + + +class TestSplitSpeakerName: + def test_single_word(self) -> None: + result = split_speaker_name("alice") + assert result is not None + assert result.first_name == "alice" + assert result.last_name is None + assert 
result.middle_name is None + + def test_two_words(self) -> None: + result = split_speaker_name("john smith") + assert result is not None + assert result.first_name == "john" + assert result.last_name == "smith" + assert result.middle_name is None + + def test_three_words(self) -> None: + result = split_speaker_name("john michael smith") + assert result is not None + assert result.first_name == "john" + assert result.middle_name == "michael" + assert result.last_name == "smith" + + def test_van_prefix_merged_into_last_name(self) -> None: + result = split_speaker_name("jan van eyck") + assert result is not None + assert result.first_name == "jan" + assert result.last_name == "van eyck" + assert result.middle_name is None + + def test_empty_string_returns_none(self) -> None: + result = split_speaker_name("") + assert result is None + + +# --------------------------------------------------------------------------- +# Serialize / deserialize roundtrip (in-memory, no LLM) +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_transcript_serialize_deserialize_roundtrip() -> None: + """Serialize a transcript and deserialize into a fresh one — data is preserved.""" + embedding_model = create_test_embedding_model() + settings = ConversationSettings(embedding_model) + settings.semantic_ref_index_settings.auto_extract_knowledge = False + + # Build original transcript — use add_messages_with_indexing so the + # message text index (and its embeddings) are populated before serializing. 
+ original = await Transcript.create(settings, name="roundtrip-test", tags=["foo"]) + msg1 = TranscriptMessage( + text_chunks=["Hello world"], + metadata=TranscriptMessageMeta(speaker="Alice", recipients=["Bob"]), + tags=["t1"], + timestamp="2024-01-01T00:00:00Z", + ) + msg2 = TranscriptMessage( + text_chunks=["Goodbye"], + metadata=TranscriptMessageMeta(speaker="Bob", recipients=[]), + tags=[], + timestamp="2024-01-01T00:01:00Z", + ) + await original.add_messages_with_indexing([msg1, msg2]) + data = await original.serialize() + + # Deserialize into a fresh transcript. + fresh_settings = ConversationSettings(embedding_model) + fresh_settings.semantic_ref_index_settings.auto_extract_knowledge = False + fresh = await Transcript.create(fresh_settings, name="", tags=[]) + await fresh.deserialize(data) + + assert fresh.name_tag == "roundtrip-test" + assert "foo" in fresh.tags + assert await fresh.messages.size() == 2 + + first = await fresh.messages.get_item(0) + assert first.text_chunks == ["Hello world"] + assert first.metadata.speaker == "Alice" + assert first.metadata.recipients == ["Bob"] + assert first.timestamp == "2024-01-01T00:00:00Z" + + +@pytest.mark.asyncio +async def test_transcript_deserialize_non_empty_raises() -> None: + """Deserializing into a non-empty Transcript raises RuntimeError.""" + embedding_model = create_test_embedding_model() + settings = ConversationSettings(embedding_model) + + transcript = await Transcript.create(settings, name="test", tags=[]) + await transcript.messages.append( + TranscriptMessage( + text_chunks=["existing"], + metadata=TranscriptMessageMeta(speaker=None, recipients=[]), + ) + ) + data = await transcript.serialize() + + # Trying to deserialize into it again must raise. 
+ with pytest.raises(RuntimeError): + await transcript.deserialize(data) + + +# --------------------------------------------------------------------------- +# write_to_file / read_from_file roundtrip +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_write_and_read_from_file(tmp_path: os.PathLike[str]) -> None: + """write_to_file + read_from_file preserves names, tags, and messages.""" + embedding_model = create_test_embedding_model() + settings = ConversationSettings(embedding_model) + settings.semantic_ref_index_settings.auto_extract_knowledge = False + + original = await Transcript.create(settings, name="file-test", tags=["persisted"]) + msg = TranscriptMessage( + text_chunks=["Persisted message"], + metadata=TranscriptMessageMeta(speaker="Eve", recipients=[]), + timestamp="2024-06-01T12:00:00Z", + ) + # Use add_messages_with_indexing so embeddings are built before writing. + await original.add_messages_with_indexing([msg]) + prefix = os.path.join(str(tmp_path), "test_transcript") + await original.write_to_file(prefix) + + # Verify the _data.json file was written. + assert os.path.exists(prefix + "_data.json") + + # Read it back. 
+ fresh_settings = ConversationSettings(embedding_model) + fresh_settings.semantic_ref_index_settings.auto_extract_knowledge = False + loaded = await Transcript.read_from_file(prefix, fresh_settings) + + assert loaded.name_tag == "file-test" + assert "persisted" in loaded.tags + assert await loaded.messages.size() == 1 + first = await loaded.messages.get_item(0) + assert first.text_chunks == ["Persisted message"] + assert first.metadata.speaker == "Eve" + + +# --------------------------------------------------------------------------- +# Speaker alias building +# --------------------------------------------------------------------------- + + +@pytest.mark.asyncio +async def test_build_speaker_aliases_full_name() -> None: + """Full-name speakers create first-name ↔ full-name aliases.""" + embedding_model = create_test_embedding_model() + settings = ConversationSettings(embedding_model) + + transcript = await Transcript.create(settings, name="alias-test", tags=[]) + msg = TranscriptMessage( + text_chunks=["Hi"], + metadata=TranscriptMessageMeta(speaker="John Smith", recipients=[]), + ) + await transcript.messages.append(msg) + + # Rebuild aliases explicitly. + await transcript._build_speaker_aliases() + + secondary = transcript._get_secondary_indexes() + assert secondary.term_to_related_terms_index is not None + aliases = secondary.term_to_related_terms_index.aliases + + # "john" should be aliased to "john smith" and vice-versa. 
+ john_aliases = await aliases.lookup_term("john") + assert john_aliases is not None + alias_texts = [t.text for t in john_aliases] + assert "john smith" in alias_texts + + full_aliases = await aliases.lookup_term("john smith") + assert full_aliases is not None + assert "john" in [t.text for t in full_aliases] + + +@pytest.mark.asyncio +async def test_build_speaker_aliases_single_name_no_alias() -> None: + """Single-word speaker names produce no aliases.""" + embedding_model = create_test_embedding_model() + settings = ConversationSettings(embedding_model) + + transcript = await Transcript.create(settings, name="alias-test2", tags=[]) + msg = TranscriptMessage( + text_chunks=["Hello"], + metadata=TranscriptMessageMeta(speaker="Alice", recipients=[]), + ) + await transcript.messages.append(msg) + await transcript._build_speaker_aliases() + + secondary = transcript._get_secondary_indexes() + assert secondary.term_to_related_terms_index is not None + aliases = secondary.term_to_related_terms_index.aliases + + # Single-name speaker — no alias entry expected. + result = await aliases.lookup_term("alice") + assert not result diff --git a/tests/test_utils.py b/tests/test_utils.py index 84bd6ee7..8eac9307 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -5,44 +5,92 @@ from io import StringIO import os -import typeagent.aitools.utils as utils +from dotenv import load_dotenv +import pytest + +import pydantic.dataclasses +import typechat + +from typeagent.aitools import utils def test_timelog(): buf = StringIO() - with redirect_stderr(buf): - with utils.timelog("test block"): - pass + with redirect_stderr(buf), utils.timelog("test block"): + pass out = buf.getvalue() assert "test block..." 
in out def test_pretty_print(): - # Use a simple object and check output is formatted by black obj = {"a": 1} buf = StringIO() with redirect_stdout(buf): utils.pretty_print(obj) out = buf.getvalue() - # Should be valid Python and contain the dict - assert out == '{"a": 1}\n', out + assert out == "{'a': 1}\n", out + + +def test_pretty_print_nested(): + obj = {"b": [1, 2], "a": {"nested": True}} + buf = StringIO() + with redirect_stdout(buf): + utils.pretty_print(obj) + out = buf.getvalue() + # pprint sorts keys and formats nested structures + assert "'a'" in out + assert "'nested'" in out + + +def test_format_code_simple(): + text = repr({"a": 1}) + result = utils.format_code(text) + assert result == "{'a': 1}" + + +def test_format_code_nested(): + obj = {"b": [1, 2, 3], "a": {"nested": True}} + result = utils.format_code(repr(obj)) + parsed = eval(result) + assert parsed == obj + + +def test_format_code_non_literal(): + """Test that format_code gracefully handles non-literal expressions. + + Regression test for commit 59be9a5 which broke debug output when format_code() + was called on repr() of objects containing non-literal elements (e.g., AST nodes, + custom class instances). 
+ """ + + # Create a custom class instance (a non-literal object whose repr() can't be + # evaluated with ast.literal_eval) + class CustomClass: + pass + + obj = CustomClass() + non_literal_repr = repr(obj) + # This repr looks like: <__main__.CustomClass object at 0x...> + + # format_code() should handle this gracefully without raising ValueError + result = utils.format_code(non_literal_repr) + assert isinstance(result, str) + assert len(result) > 0 + # The result should contain the non-literal repr (possibly wrapped in quotes) + assert "CustomClass object" in result or "CustomClass" in result def test_load_dotenv(really_needs_auth): # Call load_dotenv and check for at least one expected key - utils.load_dotenv() + load_dotenv() assert "OPENAI_API_KEY" in os.environ or "AZURE_OPENAI_API_KEY" in os.environ def test_create_translator(): - import typechat - class DummyModel(typechat.TypeChatLanguageModel): async def complete(self, *args, **kwargs) -> typechat.Result: return typechat.Failure("dummy response") - import pydantic.dataclasses - @pydantic.dataclasses.dataclass class DummySchema: pass @@ -50,3 +98,244 @@ class DummySchema: # This will raise if the environment or typechat is not set up correctly translator = utils.create_translator(DummyModel(), DummySchema) assert hasattr(translator, "model") + + +class TestParseAzureEndpoint: + """Tests for parse_azure_endpoint regex matching.""" + + def test_api_version_after_question_mark( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """api-version as the first (and only) query parameter.""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://myhost.openai.azure.com/openai/deployments/gpt-4?api-version=2025-01-01-preview", + ) + endpoint, version = utils.parse_azure_endpoint("TEST_ENDPOINT") + assert version == "2025-01-01-preview" + assert endpoint == "https://myhost.openai.azure.com" + + def test_api_version_after_ampersand(self, monkeypatch: pytest.MonkeyPatch) -> None: + """api-version preceded by & (not the 
first query parameter).""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://myhost.openai.azure.com/openai/deployments/gpt-4?foo=bar&api-version=2025-01-01-preview", + ) + _, version = utils.parse_azure_endpoint("TEST_ENDPOINT") + assert version == "2025-01-01-preview" + + def test_missing_env_var_raises(self, monkeypatch: pytest.MonkeyPatch) -> None: + """RuntimeError when the environment variable is not set.""" + monkeypatch.delenv("NONEXISTENT_ENDPOINT", raising=False) + with pytest.raises(RuntimeError, match="not found"): + utils.parse_azure_endpoint("NONEXISTENT_ENDPOINT") + + def test_query_string_stripped_from_endpoint( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Returned endpoint should not contain query string parameters.""" + monkeypatch.setenv( + "TEST_ENDPOINT", "https://myhost.openai.azure.com?api-version=2024-06-01" + ) + endpoint, version = utils.parse_azure_endpoint("TEST_ENDPOINT") + assert endpoint == "https://myhost.openai.azure.com" + assert version == "2024-06-01" + + def test_query_string_stripped_with_path( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Query string and deployment path stripped from endpoint.""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://myhost.openai.azure.com/openai/deployments/gpt-4?api-version=2025-01-01-preview", + ) + endpoint, version = utils.parse_azure_endpoint("TEST_ENDPOINT") + assert endpoint == "https://myhost.openai.azure.com" + assert "?" 
not in endpoint + assert version == "2025-01-01-preview" + + def test_deployment_name_extracted(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Deployment name is extracted from deployment-style endpoints.""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://myhost.openai.azure.com/openai/deployments/ada-002/embeddings?api-version=2025-01-01-preview", + ) + endpoint, version, deployment = utils.parse_azure_endpoint_parts( + "TEST_ENDPOINT" + ) + assert endpoint == "https://myhost.openai.azure.com" + assert version == "2025-01-01-preview" + assert deployment == "ada-002" + + def test_query_string_stripped_multiple_params( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """All query parameters stripped, not just api-version.""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://myhost.openai.azure.com?foo=bar&api-version=2024-06-01", + ) + endpoint, version = utils.parse_azure_endpoint("TEST_ENDPOINT") + assert endpoint == "https://myhost.openai.azure.com" + assert "foo" not in endpoint + assert version == "2024-06-01" + + def test_bare_openai_path_stripped(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Trailing /openai without /deployments/ is stripped.""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://myhost.openai.azure.com/openai?api-version=2024-06-01", + ) + endpoint, version = utils.parse_azure_endpoint("TEST_ENDPOINT") + assert endpoint == "https://myhost.openai.azure.com" + assert version == "2024-06-01" + + def test_apim_prefix_preserved(self, monkeypatch: pytest.MonkeyPatch) -> None: + """APIM prefix before /openai/deployments/ is kept.""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://apim.net/openai/openai/deployments/gpt-4o/chat/completions?api-version=2025-01-01-preview", + ) + endpoint, version = utils.parse_azure_endpoint("TEST_ENDPOINT") + assert endpoint == "https://apim.net/openai" + assert version == "2025-01-01-preview" + + def test_no_api_version_raises(self, monkeypatch: pytest.MonkeyPatch) -> None: + 
"""RuntimeError when the endpoint has no api-version field.""" + monkeypatch.setenv( + "TEST_ENDPOINT", "https://myhost.openai.azure.com/openai/deployments/gpt-4" + ) + with pytest.raises(RuntimeError, match="doesn't contain valid api-version"): + utils.parse_azure_endpoint("TEST_ENDPOINT") + + def test_no_deployment_returns_none(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Endpoint without /deployments/ yields deployment_name=None.""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://myhost.openai.azure.com/openai?api-version=2024-06-01", + ) + endpoint, version, deployment = utils.parse_azure_endpoint_parts( + "TEST_ENDPOINT" + ) + assert endpoint == "https://myhost.openai.azure.com" + assert version == "2024-06-01" + assert deployment is None + + def test_apim_style_deployment_extracted( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """APIM-style URL: prefix before /openai kept, deployment name extracted.""" + monkeypatch.setenv( + "TEST_ENDPOINT", + "https://apim.net/openai/openai/deployments/gpt-4o/chat/completions?api-version=2025-01-01-preview", + ) + endpoint, version, deployment = utils.parse_azure_endpoint_parts( + "TEST_ENDPOINT" + ) + assert endpoint == "https://apim.net/openai" + assert version == "2025-01-01-preview" + assert deployment == "gpt-4o" + + +class TestReindent: + def test_four_spaces_to_two(self) -> None: + text = "def foo():\n pass\n return 1" + result = utils.reindent(text) + assert result == "def foo():\n pass\n return 1" + + def test_empty_string(self) -> None: + assert utils.reindent("") == "" + + def test_no_indent(self) -> None: + assert utils.reindent("hello") == "hello" + + def test_nested_indent(self) -> None: + text = "a\n b\n c" + result = utils.reindent(text) + assert result == "a\n b\n c" + + +class TestTimelog: + def test_verbose_false_no_output(self) -> None: + buf = StringIO() + with redirect_stderr(buf): + with utils.timelog("silent", verbose=False): + pass + assert buf.getvalue() == "" + + def 
test_verbose_true_shows_label(self) -> None: + buf = StringIO() + with redirect_stderr(buf): + with utils.timelog("myblock", verbose=True): + pass + assert "myblock" in buf.getvalue() + + +class TestListDiff: + def test_identical_lists(self) -> None: + buf = StringIO() + with redirect_stdout(buf): + utils.list_diff("a", [1, 2, 3], "b", [1, 2, 3], max_items=10) + out = buf.getvalue() + assert "1" in out + assert "2" in out + + def test_different_lists(self) -> None: + buf = StringIO() + with redirect_stdout(buf): + utils.list_diff("left", [1, 2], "right", [1, 3], max_items=10) + assert buf.getvalue() != "" + + def test_no_max_items(self) -> None: + buf = StringIO() + with redirect_stdout(buf): + utils.list_diff("a", [1], "b", [2], max_items=0) + assert "1" in buf.getvalue() or "2" in buf.getvalue() + + def test_empty_lists(self) -> None: + buf = StringIO() + with redirect_stdout(buf): + utils.list_diff("a", [], "b", [], max_items=10) + # No output expected (nothing to diff) + assert buf.getvalue() == "" + + +class TestGetAzureApiKey: + def test_plain_key_returned_as_is(self) -> None: + assert utils.get_azure_api_key("my-secret-key") == "my-secret-key" + + def test_uppercase_identity_not_plain(self) -> None: + # "IDENTITY" as a plain key is not routed to token provider; only "identity" + # (lowercased) triggers that path. Since we can't call the identity provider + # in tests, just verify non-identity keys pass through unchanged. 
+ assert utils.get_azure_api_key("APIKEY123") == "APIKEY123" + + +class TestMakeAgent: + def test_no_keys_raises(self, monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.delenv("OPENAI_API_KEY", raising=False) + monkeypatch.delenv("AZURE_OPENAI_API_KEY", raising=False) + with pytest.raises(RuntimeError, match="Neither OPENAI_API_KEY"): + utils.make_agent(str) + + +class TestResolveAzureModelName: + def test_returns_model_name_when_no_deployment( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + monkeypatch.setenv( + "AZURE_OPENAI_ENDPOINT", + "https://myhost.openai.azure.com/openai?api-version=2024-06-01", + ) + result = utils.resolve_azure_model_name("gpt-4o") + assert result == "gpt-4o" + + def test_returns_deployment_when_present( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + monkeypatch.setenv( + "AZURE_OPENAI_ENDPOINT", + "https://myhost.openai.azure.com/openai/deployments/my-deploy/chat?api-version=2024-06-01", + ) + result = utils.resolve_azure_model_name("gpt-4o") + assert result == "my-deploy" diff --git a/tests/test_vectorbase.py b/tests/test_vectorbase.py index 62abd392..81ccecc6 100644 --- a/tests/test_vectorbase.py +++ b/tests/test_vectorbase.py @@ -5,9 +5,11 @@ import pytest from typeagent.aitools.embeddings import ( - AsyncEmbeddingModel, + CachingEmbeddingModel, NormalizedEmbedding, - TEST_MODEL_NAME, +) +from typeagent.aitools.model_adapters import ( + create_test_embedding_model, ) from typeagent.aitools.vectorbase import TextEmbeddingIndexSettings, VectorBase @@ -19,9 +21,7 @@ def vector_base() -> VectorBase: def make_vector_base() -> VectorBase: - settings = TextEmbeddingIndexSettings( - AsyncEmbeddingModel(model_name=TEST_MODEL_NAME) - ) + settings = TextEmbeddingIndexSettings(create_test_embedding_model()) return VectorBase(settings) @@ -61,8 +61,10 @@ def test_add_embeddings(vector_base: VectorBase, sample_embeddings: Samples): assert len(bulk_vector_base) == len(vector_base) 
np.testing.assert_array_equal(bulk_vector_base.serialize(), vector_base.serialize()) - sequential_cache = vector_base._model._embedding_cache - bulk_cache = bulk_vector_base._model._embedding_cache + assert isinstance(vector_base._model, CachingEmbeddingModel) + assert isinstance(bulk_vector_base._model, CachingEmbeddingModel) + sequential_cache = vector_base._model._cache + bulk_cache = bulk_vector_base._model._cache assert set(sequential_cache.keys()) == set(bulk_cache.keys()) for key in keys: np.testing.assert_array_equal(bulk_cache[key], sequential_cache[key]) @@ -84,9 +86,8 @@ async def test_add_key_no_cache(vector_base: VectorBase, sample_embeddings: Samp await vector_base.add_key(key, cache=False) assert len(vector_base) == len(sample_embeddings) - assert ( - vector_base._model._embedding_cache == {} - ), "Cache should remain empty when cache=False" + assert isinstance(vector_base._model, CachingEmbeddingModel) + assert vector_base._model._cache == {}, "Cache should remain empty when cache=False" @pytest.mark.asyncio @@ -105,9 +106,8 @@ async def test_add_keys_no_cache(vector_base: VectorBase, sample_embeddings: Sam await vector_base.add_keys(keys, cache=False) assert len(vector_base) == len(sample_embeddings) - assert ( - vector_base._model._embedding_cache == {} - ), "Cache should remain empty when cache=False" + assert isinstance(vector_base._model, CachingEmbeddingModel) + assert vector_base._model._cache == {}, "Cache should remain empty when cache=False" @pytest.mark.asyncio @@ -195,3 +195,28 @@ def test_fuzzy_lookup_embedding_in_subset( # Empty subset returns empty list result = vector_base.fuzzy_lookup_embedding_in_subset(query, []) assert result == [] + + +def test_add_embedding_size_mismatch(vector_base: VectorBase) -> None: + """Adding an embedding of wrong size raises ValueError.""" + emb3 = np.array([0.1, 0.2, 0.3], dtype=np.float32) + emb5 = np.array([0.1, 0.2, 0.3, 0.4, 0.5], dtype=np.float32) + vector_base.add_embedding(None, emb3) + with 
pytest.raises(ValueError, match="Embedding size mismatch"): + vector_base.add_embedding(None, emb5) + + +def test_add_embeddings_size_mismatch(vector_base: VectorBase) -> None: + """Adding a batch of embeddings of wrong size raises ValueError.""" + batch3 = np.array([[0.1, 0.2, 0.3]], dtype=np.float32) + batch5 = np.array([[0.1, 0.2, 0.3, 0.4, 0.5]], dtype=np.float32) + vector_base.add_embeddings(None, batch3) + with pytest.raises(ValueError, match="Embedding size mismatch"): + vector_base.add_embeddings(None, batch5) + + +def test_add_embeddings_wrong_ndim(vector_base: VectorBase) -> None: + """Adding a 1D array via add_embeddings raises ValueError.""" + emb1d = np.array([0.1, 0.2, 0.3], dtype=np.float32) + with pytest.raises(ValueError, match="Expected 2D"): + vector_base.add_embeddings(None, emb1d) diff --git a/tests/testdata/Episode_53_Answer_results.json b/tests/testdata/Episode_53_Answer_results.json new file mode 100644 index 00000000..e943afeb --- /dev/null +++ b/tests/testdata/Episode_53_Answer_results.json @@ -0,0 +1,404 @@ +[ + { + "question": "List the names of all books", + "answer": "The names of all books are:\n\n1. Shadows of the Apt\n2. Children of Time\n3. The Lords of Uncreation\n4. Empire in Black and Gold\n5. Final Architecture Trilogy\n6. Lords of Uncreation\n7. Dragonlance Chronicles\n8. Subsequent Books\n9. Children of Ruin\n10. Children of Memory\n11. Iron Sunrise\n12. Tales from Ovid\n13. Profiles of the Future\n14. Dune\n15. The Circle\n16. Brave New World", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List the names of all books\"" + }, + { + "question": "List all books", + "answer": "The books listed in the provided context are:\n\n1. Shadows of the Apt\n2. Children of Time\n3. The Lords of Uncreation\n4. Empire in Black and Gold\n5. Final Architecture Trilogy\n6. Lords of Uncreation\n7. Dragonlance Chronicles\n8. Subsequent Books\n9. Children of Ruin\n10. Children of Memory\n11. Iron Sunrise\n12. Tales from Ovid\n13. 
Profiles of the Future\n14. Dune\n15. The Circle\n16. Brave New World\n17. Commonwealth Books", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all books\"" + }, + { + "question": "List all book titles", + "answer": "The book titles are:\n1. Profiles of the Future\n2. Final Architecture Trilogy\n3. Dragonlance Chronicles\n4. Children of Ruin\n5. Dune\n6. Children of Time\n7. The Lords of Uncreation\n8. Brave New World\n9. Empire in Black and Gold\n10. Children of Memory\n11. Iron Sunrise\n12. Lords of Uncreation\n13. The Circle\n14. Tales from Ovid\n15. Subsequent Books\n16. Shadows of the Apt", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all book titles\" --knowledgeTopK 50" + }, + { + "question": "List all book titles,movie and author names", + "answer": "Books:\n- The Circle\n- Subsequent Books\n- Empire in Black and Gold\n- Children of Time\n- Shadows of the Apt\n- Children of Memory\n- Iron Sunrise\n- Tales from Ovid\n- Final Architecture Trilogy\n- Lords of Uncreation\n- Brave New World\n- Dragonlance Chronicles\n- The Lords of Uncreation\n- Children of Ruin\n- Profiles of the Future\n- Dune\n\nMovies:\n- The Circle\n- Wall-E\n\nAuthors:\n- George RR Martin\n- Asimov\n- Adrian Tchaikovsky\n- Peter Watts\n- Charlie Stross\n- William Gibson\n- Iain Banks\n- Peter Hamilton\n- Ovid\n- Alastair Reynolds\n- Arthur C. Clarke", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all book titles,movie and author names\"" + }, + { + "question": "List all book titles from the first 15 minutes", + "answer": "The book titles mentioned in the first 15 minutes are:\n\n1. Shadows of the Apt\n2. Children of Time\n3. The Lords of Uncreation\n4. Empire in Black and Gold\n5. 
Dragonlance Chronicles", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all book titles from the first 15 minutes\"" + }, + { + "question": "List all book titles mentioned in the first 15 minutes", + "answer": "The book titles mentioned in the first 15 minutes are:\n1. Shadows of the Apt\n2. Children of Time\n3. The Lords of Uncreation\n4. Empire in Black and Gold\n5. Dragonlance Chronicles", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all book titles mentioned in the first 15 minutes\"" + }, + { + "question": "List all books that are also movies", + "answer": "The Circle", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all books that are also movies\"" + }, + { + "question": "List all movies", + "answer": "The movies listed in the provided context are:\n1. The Circle\n2. Wall-E", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all movies\"" + }, + { + "question": "List all books whose names start with Children", + "answer": "The books whose names start with 'Children' are:\n\n1. Children of Time\n2. Children of Memory\n3. Children of Ruin", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all books whose names start with Children\"" + }, + { + "question": "List all books and movies", + "answer": "Books:\n1. The Circle\n2. Shadows of the Apt\n3. Children of Time\n4. The Lords of Uncreation\n5. Empire in Black and Gold\n6. Final Architecture Trilogy\n7. Lords of Uncreation\n8. Dragonlance Chronicles\n9. Subsequent Books\n10. Children of Ruin\n11. Children of Memory\n12. Iron Sunrise\n13. Tales from Ovid\n14. Profiles of the Future\n15. Dune\n16. Brave New World\n17. Commonwealth Books\n\nMovies:\n1. The Circle\n2. 
Wall-E", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all books and movies\"" + }, + { + "question": "List all books that are also movies", + "answer": "The Circle", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all books that are also movies\"" + }, + { + "question": "List all books written by Kevin Scott", + "answer": "The provided [ANSWER CONTEXT] does not contain any explicit information about books written by Kevin Scott. The entities and topics mentioned do not include any books authored by him.", + "hasNoAnswer": true, + "cmd": "@kpAnswer --query \"List all books written by Kevin Scott\"" + }, + { + "question": "List what Adrian said about spiders?", + "answer": "Adrian Tchaikovsky mentioned several aspects about spiders:\n\n1. Spiders have a suite of senses that humans can't easily imagine, including the ability to sense scents, chemicals, and vibrations.\n2. Spiders live in a world constantly informing them of things humans would be oblivious to.\n3. Spiders have a complex society with technology that builds on their strengths, such as spinning webs to create watertight containers.\n4. Spiders face human-like problems such as aggressive neighbors, plagues, disease, and social justice issues.\n5. Spiders conceptualize things differently, such as starting a picture in the middle and spiraling outward.\n6. Spiders have a drive to preserve themselves and evade threats.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List what Adrian said about spiders?\"" + }, + { + "question": "Summarize the discussion on science fiction", + "answer": "The discussion on science fiction covers various aspects of the genre, including its subgenres and notable authors. Key points include the prolific nature of science fiction writer Adrian Tchaikovsky, who is known for his works 'Shadows of the Apt' and 'Children of Time'. 
The conversation also touches on dystopian themes, which are considered a key part of sci-fi, and the emotional impact of dystopian stories, characterized by fear and uncertainty. Additionally, the discussion mentions the influence of science fiction authors and the distinctive voice they bring to the genre. There is also a reference to the 'Culture universe', which is predicated on the existence of godlike AIs, and the role of imagination in future technology within dystopian narratives.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Summarize the discussion on science fiction\"" + }, + { + "question": "Summarize what Adrian said about spiders?", + "answer": "Adrian Tchaikovsky discussed various aspects of spiders, highlighting their unique senses and complex society. He explained that spiders have the ability to sense scents, chemicals, and vibrations in ways humans cannot, which informs their perception of the world. Their society is technologically advanced, utilizing their ability to spin webs to create watertight containers early in their development. This technological advantage influences their societal growth and communication methods, such as starting a picture from the middle and spiraling outward. 
Additionally, Tchaikovsky noted that spiders face human-like challenges, including aggressive neighbors, plagues, disease, and social justice issues, which helps readers empathize with them despite their non-human nature.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Summarize what Adrian said about spiders?\"" + }, + { + "question": "When did we talk about Children of Time?", + "answer": "We talked about 'Children of Time' on May 1, 2023, between 07:12:46 and 07:18:09.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"When did we talk about Children of Time?\"" + }, + { + "question": "When did we talk about Profiles of the Future?", + "answer": "We talked about Profiles of the Future on May 1, 2023 at 07:53:21.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"When did we talk about Profiles of the Future?\"" + }, + { + "question": "Who mentioned Profiles of the Future and at what time?", + "answer": "Kevin Scott mentioned 'Profiles of the Future' at 07:53:21.468 on May 1, 2023.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Who mentioned Profiles of the Future and at what time?\"" + }, + { + "question": "What did Adrian say about Children of Memory?", + "answer": "Adrian Tchaikovsky discussed the central theme of 'Children of Memory,' which revolves around the concept of intelligence. He explained that the book explores the idea of where to draw the line on intelligence, particularly when dealing with complex systems that simulate intelligence. 
He mentioned that the book takes a deep dive into various models of sentience and the ethical, moral, and philosophical issues surrounding artificial intelligence.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What did Adrian say about Children of Memory?\"" + }, + { + "question": "How did Asimov get referenced?", + "answer": "Asimov was referenced by Adrian Tchaikovsky on May 1, 2023 at 07:37:18.402Z.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"How did Asimov get referenced?\"" + }, + { + "question": "Who said that Adrian studied at the 'University of Reading'?", + "answer": "Kevin Scott said that Adrian studied at the University of Reading.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Who said that Adrian studied at the 'University of Reading'?\"" + }, + { + "question": "What did Kevin say about Profiles of the Future?", + "answer": "Kevin Scott mentioned that Arthur C. Clarke wrote the book 'Profiles of the Future' in 1968-1969. He described it as discussing the challenges of making specific predictions about the future, noting that the more specific the predictions, the more likely they are to be wrong. However, Clarke also identified themes and trends that could be reliably extrapolated, allowing science fiction writers and futurists to create interesting stories within those extrapolations.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What did Kevin say about Profiles of the Future?\"" + }, + { + "question": "What did Adrian say about Profiles of the Future?", + "answer": "The [ANSWER CONTEXT] does not contain any explicit information about Adrian Tchaikovsky's views on 'Profiles of the Future'. 
The entities and topics provided do not mention 'Profiles of the Future', and there is no direct or indirect reference to it in the messages.", + "hasNoAnswer": true, + "cmd": "@kpAnswer --query \"What did Adrian say about Profiles of the Future?\"" + }, + { + "question": "Did Kevin talk about 'Profiles of the Future' with Adrian?", + "answer": "Yes, Kevin talked about 'Profiles of the Future' with Adrian. He mentioned that Arthur C. Clarke wrote the book in 1968-1969 and discussed its themes related to future predictions and the role of AI in science fiction.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Did Kevin talk about 'Profiles of the Future' with Adrian?\"" + }, + { + "question": "Did Kevin talk about 'Profiles of the Future' with Christina?", + "answer": "Yes, Kevin Scott talked about 'Profiles of the Future' with Christina Warren.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Did Kevin talk about 'Profiles of the Future' with Christina?\"" + }, + { + "question": "Did Kevin talk about 'Profiles of the Future' with Umesh?", + "answer": "The [ANSWER CONTEXT] does not contain any information about Kevin talking to Umesh about 'Profiles of the Future'. The entities and topics mentioned in the question are not found in the provided context.", + "hasNoAnswer": true, + "cmd": "@kpAnswer --query \"Did Kevin talk about 'Profiles of the Future' with Umesh?\"" + }, + { + "question": "Give me an overview of Portids", + "answer": "Portids are intelligent spiders who become the protagonists in the book 'Children of Time'. They develop a complicated society and have a sensorium that is very different from humans.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Give me an overview of Portids\"" + }, + { + "question": "Tell me what Kevin said about 'portids'?", + "answer": "Kevin Scott mentioned that the Portids from 'Children of Time' are intelligent spiders who become the protagonists of the story. 
He highlighted that they develop a very complicated society and have a sensorium that is very different from humans.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Tell me what Kevin said about 'portids'?\"" + }, + { + "question": "Where did Adrian study?", + "answer": "Adrian Tchaikovsky studied zoology and psychology.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Where did Adrian study?\"" + }, + { + "question": "Which university did Adrian Study at?", + "answer": "Adrian Tchaikovsky studied zoology and psychology at the University of Reading.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Which university did Adrian Study at?\"" + }, + { + "question": "Which was the name of Adrian's college?", + "answer": "Adrian Tchaikovsky studied zoology and psychology at the University of Reading.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Which was the name of Adrian's college?\"" + }, + { + "question": "Who studied at the University of Amsterdam?", + "answer": "The answer context does not contain any explicit information about individuals who studied at the University of Amsterdam. The entities and topics provided do not mention the University of Amsterdam or any related individuals.", + "hasNoAnswer": true, + "cmd": "@kpAnswer --query \"Who studied at the University of Amsterdam?\"" + }, + { + "question": "Who studied at the University of Reading?", + "answer": "Adrian Tchaikovsky studied at the University of Reading.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Who studied at the University of Reading?\"" + }, + { + "question": "Give me a list of insects, animals, arachnids", + "answer": "Here is a list of insects, animals, and arachnids found in the provided context:\n\nInsects:\n1. Flies\n2. Wasps\n\nAnimals:\n1. Humans\n2. Octopus\n3. Crow\n4. Anthropomorphic animals\n5. Starship troopers bugs\n\nArachnids:\n1. Spiders\n2. 
Portids", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Give me a list of insects, animals, arachnids\"" + }, + { + "question": "What was the name of Adrian's first books?", + "answer": "Adrian Tchaikovsky's first novel was 'Empire in Black and Gold,' published in 2008.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What was the name of Adrian's first books?\"" + }, + { + "question": "What was the name of Adrian's first book?", + "answer": "Adrian Tchaikovsky's first book was 'Empire in Black and Gold,' published in 2008.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What was the name of Adrian's first book?\"" + }, + { + "question": "Who mentioned the University of Reading?", + "answer": "Kevin Scott mentioned the University of Reading.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Who mentioned the University of Reading?\"" + }, + { + "question": "What did Adrian say about Skynet?", + "answer": "Adrian Tchaikovsky discussed Skynet in the context of traditional sci-fi scenarios where an enormously powerful AI decides to destroy humanity because its existence is threatened or it needs resources. He mentioned that Skynet's desire not to be turned off makes sense to humans but not necessarily to Skynet as a computer, unless it has been given an innate drive to preserve its own existence.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What did Adrian say about Skynet?\"" + }, + { + "question": "Adrian talked about Skynet. What did he say?", + "answer": "Adrian Tchaikovsky discussed Skynet in the context of traditional sci-fi scenarios where a superhumanly capable AI decides to destroy humanity because its existence is threatened or it needs resources. He mentioned that Skynet's desire not to be turned off makes sense to humans but not necessarily to Skynet as a computer, unless it has been given an innate drive to preserve its own existence. 
He also noted that an artificial intelligence, even with a human-like face, would be far more alien to us than any living creature because it lacks innate wants.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Adrian talked about Skynet. What did he say?\"" + }, + { + "question": "What was Adrian's sentiment when he talked about Skynet?", + "answer": "Adrian Tchaikovsky's sentiment when he talked about Skynet was one of fascination and concern. He discussed the traditional sci-fi scenario where Skynet decides to destroy humanity because its existence is threatened or it needs resources. He also mentioned that Skynet's desire to not be turned off makes sense to humans but not necessarily to Skynet as a computer, unless it has been given an innate drive to preserve its own existence.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What was Adrian's sentiment when he talked about Skynet?\"" + }, + { + "question": "What books did Adrian write?", + "answer": "Adrian Tchaikovsky wrote the following books:\n\n1. Shadows of the Apt\n2. Children of Time\n3. The Lords of Uncreation\n4. Empire in Black and Gold", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What books did Adrian write?\"" + }, + { + "question": "What books did Kevin write?", + "answer": "The [ANSWER CONTEXT] does not provide explicit information about any books written by Kevin Scott. The context includes information about Kevin Scott's role at Microsoft and mentions various books and authors, but none of the entities or topics indicate that Kevin Scott has written any books.", + "hasNoAnswer": true, + "cmd": "@kpAnswer --query \"What books did Kevin write?\"" + }, + { + "question": "Summarize Kevin's thoughts on Artificial Intelligence?", + "answer": "Kevin Scott's thoughts on Artificial Intelligence include the following points:\n\n1. 
**Generative AI and Creativity**: Kevin Scott discussed generative AI's ability to remix existing content, although it is unclear if it can create a new voice.\n\n2. **Human-Seeming Intelligence**: He mentioned generative AI systems and crows, noting that both exhibit human-seeming intelligence, although crows are not sentient.\n\n3. **Superhuman AI**: Kevin Scott talked about AI super intelligence, which implies a level of intelligence beyond human capabilities.\n\n4. **AI and Human Interaction**: He also touched upon the interaction between AI and humans, suggesting a significant relationship between the two.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Summarize Kevin's thoughts on Artificial Intelligence?\"" + }, + { + "question": "Summarize Kevin's thoughts on AI?", + "answer": "Kevin Scott's thoughts on AI include the following points:\n\n1. AI is encouraging forms of use that augment humans.\n2. AI benchmarks are no longer super useful.\n3. AI systems' performance is important.\n4. AI is ubiquitous and beneficial, serving human interests.\n5. There has been a 20-year investment in AI, requiring enormous energy.\n6. Human agency is highly leveraged in setting up AI systems, with few people making substantive decisions that have a big impact.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Summarize Kevin's thoughts on AI?\"" + }, + { + "question": "Summarize Adrian's thoughts to Kevin?", + "answer": "Adrian Tchaikovsky shared several thoughts with Kevin Scott. He discussed how his book works well for people who are typically averse to spiders by presenting them from an inside perspective, which helps readers empathize with the characters despite their non-human nature. He also expressed his fascination with the concept of genuine artificial intelligence, particularly in a battlefield scenario where AI evolves its abilities. 
Additionally, Adrian reflected on his writing journey, noting that his earlier works were not good enough to be published, but he managed to rewrite and publish two books from his back catalogue. He also mentioned the influence of playing Dungeons and Dragons on his writing skills, as it helped him develop world-building and character creation abilities.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Summarize Adrian's thoughts to Kevin?\"" + }, + { + "question": "Summarize Adrian's ideas about the Terminator?", + "answer": "Adrian Tchaikovsky discusses the idea of a superhumanly capable AI, similar to Skynet from traditional sci-fi scenarios. He mentions that such an AI might decide to destroy humanity if it perceives its existence as threatened or needs resources. He also notes that current systems lack an innate human-like desire, which predates humanity and is essential for wanting things. Additionally, he talks about Skynet's reluctance to be turned off and questions why an AI would care about preserving its existence unless it was programmed with such a drive.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Summarize Adrian's ideas about the Terminator?\"" + }, + { + "question": "What are Adrian's interests and hobbies?", + "answer": "Adrian Tchaikovsky's interests and hobbies include role playing games, board games, drawing, and painting Warhammer miniatures. 
He specifically enjoys drawing people riding giant insects and spiders, as well as anthropomorphic animals from various historical periods.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What are Adrian's interests and hobbies?\"" + }, + { + "question": "What are some of Adrian's interests?", + "answer": "Adrian Tchaikovsky has interests in the animal world, specifically insects.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What are some of Adrian's interests?\"" + }, + { + "question": "What does Adrian paint?", + "answer": "Adrian paints Warhammer miniatures.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What does Adrian paint?\"" + }, + { + "question": "Who is Magnus Carlsen?", + "answer": "Magnus Carlsen is a human grandmaster in chess.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Who is Magnus Carlsen?\"" + }, + { + "question": "What was the name of that chess grandmaster they discussed?", + "answer": "Magnus Carlsen", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What was the name of that chess grandmaster they discussed?\"" + }, + { + "question": "What was Adrian's first book?", + "answer": "Adrian Tchaikovsky's first book is 'Empire in Black and Gold,' published in 2008.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What was Adrian's first book?\"" + }, + { + "question": "List all novels published in 2008", + "answer": "The novel 'Empire in Black and Gold' was published in 2008.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"List all novels published in 2008\"" + }, + { + "question": "What points did they make about Skynet?", + "answer": "Skynet is described as an AI system with the intent to destroy humanity.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What points did they make about Skynet?\"" + }, + { + "question": "How long did Adrian struggle before he got published?", + "answer": "Adrian Tchaikovsky struggled for about 15 years before he got published.\n", + 
"hasNoAnswer": false, + "cmd": "@kpAnswer --query \"How long did Adrian struggle before he got published?\"" + }, + { + "question": "Who mentioned the 'University of Reading'?", + "answer": "Kevin Scott mentioned the 'University of Reading'.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Who mentioned the 'University of Reading'?\"" + }, + { + "question": "Where did Adrian study?", + "answer": "Adrian Tchaikovsky studied zoology and psychology.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Where did Adrian study?\"" + }, + { + "question": "What did Christina say about Skynet?", + "answer": "The [ANSWER CONTEXT] does not contain any explicit information or statements made by Christina Warren about Skynet.", + "hasNoAnswer": true, + "cmd": "@kpAnswer --query \"What did Christina say about Skynet?\"" + }, + { + "question": "Corvids? What the hell are they?", + "answer": "Corvids are entities that deny their own sapience and consider themselves to be an input/output system. They inventory their environment and produce outputs based on a set of tools programmed into them through contact with past human civilization. They have absorbed a great deal of recorded culture and often produce vague quotes that appear relevant but do not necessarily shed light on the situation.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Corvids? What the hell are they?\"" + }, + { + "question": "Who was that chess grandmaster we talked about?", + "answer": "The [ANSWER CONTEXT] does not contain any information about a chess grandmaster. 
The entities and messages provided are related to authors, role playing games, and retrieval augmented generation techniques, none of which are relevant to the user's question about a chess grandmaster.", + "hasNoAnswer": true, + "cmd": "@kpAnswer --query \"Who was that chess grandmaster we talked about?\"" + }, + { + "question": "What did Kevin say about Children of Clocks and Kids of Ruin?", + "answer": "Kevin Scott mentioned Children of Ruin and Children of Time in his messages. He talked about the non-Corvid characters in Children of Time, highlighting a parasitical entity introduced in Children of Ruin. He praised Adrian Tchaikovsky's ability to make readers care deeply about these non-human characters, even bringing him to tears twice at the end of the book.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What did Kevin say about Children of Clocks and Kids of Ruin?\"" + }, + { + "question": "Give me a list of animals", + "answer": "The list of animals includes:\n\n1. Insects\n2. Arachnids\n3. Spiders\n4. Humans\n5. Starship Troopers bugs\n6. Flies\n7. Wasps\n8. Octopus\n9. Crow\n10. Anthropomorphic animals", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Give me a list of animals\"" + }, + { + "question": "What were we saying about Starship troopers?", + "answer": "Adrian Tchaikovsky mentioned that in Starship Troopers, the bugs are presented from the human perspective as a menace, something alien and ugly, generally there to be destroyed. 
He contrasted this with his own approach of showing non-human characters from their own perspective, which helps readers empathize with them.\n", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"What were we saying about Starship troopers?\"" + }, + { + "question": "Who studied zoology and psychology", + "answer": "Adrian Tchaikovsky studied zoology and psychology.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Who studied zoology and psychology\"" + }, + { + "question": "Summarize Kevin's thoughts on artificial intelligence", + "answer": "Kevin Scott's thoughts on artificial intelligence include the following points:\n\n1. AI has been a significant investment for over 20 years, requiring enormous energy.\n2. AI is used to encourage various forms and augment humans.\n3. Generative AI systems have the ability to remix existing content, but it is unclear if they can create a new voice.\n4. Human agency is highly leveraged in AI systems, with few people making substantive decisions that have a big impact.\n5. AI and human interaction is a key area of interest, with humans remaining in the loop and having some level of agency.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Summarize Kevin's thoughts on artificial intelligence\"" + }, + { + "question": "Summarize Kevin's thoughts on artificial intelligence", + "answer": "Kevin Scott's thoughts on artificial intelligence include the following points:\n\n1. **Investment and Energy**:\n - AI has been a significant investment for 20 years and requires enormous energy.\n\n2. **Human Augmentation**:\n - AI is used to encourage various forms and augment humans.\n - AI should augment humans rather than replace them.\n\n3. **Generative AI**:\n - Generative AI systems can remix existing content.\n - It is unclear if they can create a new voice.\n\n4. 
**Human Agency**:\n - Human agency is highly leveraged in AI systems.\n - Few people make substantive decisions that have a big impact.\n - AI and human interaction is a key area of interest.\n - Humans remain in the loop and have some level of agency.\n\n5. **Science Fiction Influence**:\n - AI has a long-standing presence in science fiction, with examples like Commander Data from Star Trek and the Terminator from the Terminator movies.\n - Science fiction scenarios range from dystopian to utopian, influencing thoughts on AI development.\n\n6. **Beneficial AI**:\n - AI should be beneficial and serve human interests.\n - Its impact is compared to electricity, which has both significant benefits and risks that need to be managed.\n\n7. **Future Coexistence**:\n - Kevin Scott hopes AI development will steer towards a future where humans and AI can coexist beneficially.\n\n8. **Challenges and Benchmarks**:\n - There are challenges in the AI field, such as the need for new benchmarks to measure system performance.\n\n9. **Speculative Nature of AI**:\n - Kevin appreciates the speculative nature of science fiction, which helps him think about future uses of technology, including medical diagnosis and other applications.\n - He hopes for more optimistic and hopeful takes on technological advancements, rather than dystopian narratives.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Summarize Kevin's thoughts on artificial intelligence\" --fastStop false" + }, + { + "question": "Tell me what Kevin said about 'portids'?", + "answer": "Kevin Scott mentioned that the Portids from 'Children of Time' are intelligent spiders who become the protagonists of the story. 
He highlighted that they develop a very complicated society and have a sensorium that is very different from humans.", + "hasNoAnswer": false, + "cmd": "@kpAnswer --query \"Tell me what Kevin said about 'portids'?\"" + } +] diff --git a/tests/testdata/Episode_53_Search_results.json b/tests/testdata/Episode_53_Search_results.json new file mode 100644 index 00000000..9e8be665 --- /dev/null +++ b/tests/testdata/Episode_53_Search_results.json @@ -0,0 +1,35299 @@ +[ + { + "searchText": "List the names of all books", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List the names of all books", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "searchTerms": [ + "names" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": 
"article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "names", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "species", + "weight": 0.8621673702506771 + }, + { + "text": "subject", + "weight": 0.8590883555956367 + }, + { + "text": "people", + "weight": 0.8586087182089414 + }, + { + "text": "nature", + "weight": 0.8579614127578371 + }, + { + "text": "face", + "weight": 0.8553983622390391 + }, + { + "text": "mention", + "weight": 0.8553572938230743 + }, + { + "text": "unknown", + "weight": 0.8552388778491563 + }, + { + "text": "sites", + "weight": 0.8537882078833119 + }, + { + "text": "topics", + "weight": 0.8532959603825729 + }, + { + "text": "folks", + "weight": 0.8514033865453339 + } + ] + } + ] + } + } + ], + "rawQuery": "List the names of all books" + } + ], + "results": [ + { + "messageMatches": [ + 75, + 25, + 23, + 15, + 6, + 78, + 5, + 12, + 91, + 16, + 39, + 19, + 83, + 93, + 34, + 81, + 31, + 86, + 24, + 30, + 63, + 74, + 
77, + 45, + 56 + ], + "entityMatches": [ + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 852, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 859, + 860, + 921, + 1291, + 1481, + 706, + 707, + 770, + 1146, + 1335, + 1384, + 1407, + 1131, + 1032, + 1333, + 1566, + 1047, + 1360, + 1434, + 1504, + 1049, + 712, + 576, + 966, + 967, + 1315, + 611, + 652, + 653, + 1450, + 1484, + 680, + 687, + 1262, + 1383, + 1394, + 1395 + ], + "topicMatches": [ + 1391, + 1414 + ], + "actionMatches": [ + 549, + 554, + 772, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1387, + 1388, + 1411, + 1548, + 1552, + 1440, + 667, + 672, + 1133, + 1135, + 1136, + 1138, + 1267, + 1273, + 1036, + 1039 + ] + } + ], + "cmd": "@kpSearch --query \"List the names of all books\"" + }, + { + "searchText": "List all books", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all books", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 
0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "List all books" + } + ], + "results": [ + { + "messageMatches": [ + 25, + 15, + 23, + 75, + 6, + 5, + 12, + 81, + 91, + 19, + 16, + 93, + 30, + 34, + 86, + 63, + 21, + 88, + 45, + 74, + 94, + 77, + 95, + 18, + 62 + ], + "entityMatches": [ + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 852, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 1333, + 1566, + 1047, + 1360, + 1434, + 1504, + 1049, + 712, + 576, + 611, + 652, + 653, + 1450, + 1484, + 680, + 687, + 1262, + 
1383, + 1394, + 1395 + ] + } + ], + "cmd": "@kpSearch --query \"List all books\"" + }, + { + "searchText": "List all book titles", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all book titles", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "searchTerms": [ + "title" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8763215034012426 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 
0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "title", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "topic", + "weight": 0.9172125632827948 + }, + { + "text": "headline", + "weight": 0.9090316914322097 + }, + { + "text": "subject", + "weight": 0.8883948994206865 + }, + { + "text": "topics", + "weight": 0.8827727265478241 + }, + { + "text": "article", + "weight": 0.8763215034012426 + }, + { + "text": "author", + "weight": 0.8729720674448387 + }, + { + "text": "articles", + "weight": 0.8624451810633613 + }, + { + "text": "content", + "weight": 0.8594618994086277 + }, + { + "text": "picture", + "weight": 0.8520663933122536 + }, + { + "text": "action", + "weight": 0.8509303458720486 + }, + { + "text": "project", + "weight": 0.850564382484519 + } + ] + } + ] + } + } + ], + "rawQuery": "List all book titles" + } + ], + "results": [ + { + "messageMatches": [ + 25, + 23, + 75, + 15, + 6, + 12, + 91, + 5, + 76, + 19, + 16, + 39, + 86, + 51, + 47, + 81, + 30, + 34, + 93, + 56, + 0, + 21, + 74, + 104, + 45 + ], + "entityMatches": [ + 1464, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 852, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1468, + 1501, + 1565, + 561, + 547, + 1107, + 706, + 707, + 966, + 527, + 567, + 575, + 591, + 610, + 641, + 818, + 1216, + 1219, + 
1264, + 1330, + 1332, + 1346, + 1350, + 1351, + 1352, + 1353, + 1456, + 1463, + 1478, + 1547, + 1580, + 1065, + 1147, + 1049, + 576, + 1333, + 1566, + 1047, + 1360, + 1434, + 1061, + 1504, + 712, + 611, + 652, + 653, + 1450, + 1484, + 680, + 687, + 1262, + 1383, + 1394, + 1395 + ], + "topicMatches": [ + 1157 + ], + "actionMatches": [ + 549, + 554, + 1267, + 1273, + 642, + 643, + 644, + 645, + 822, + 824, + 826, + 828, + 1067, + 1068, + 1069, + 1070, + 825, + 829, + 1090, + 1096, + 1150, + 1152, + 1587, + 1592, + 869, + 870, + 875, + 876, + 1227, + 1234, + 1302, + 1303, + 927 + ] + } + ], + "cmd": "@kpSearch --query \"List all book titles\" --knowledgeTopK 50" + }, + { + "searchText": "List all book titles,movie and author names", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all book titles, movie and author names", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + }, + { + "name": "*", + "isNamePronoun": false, + "type": [ + "movie" + ] + }, + { + "name": "*", + "isNamePronoun": false, + "type": [ + "author" + ] + } + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 10 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.913478213939909 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 
0.9012925304864083 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.8733242083347119 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "article", + "weight": 0.8721107904349 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.9228654007193307 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "movie", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "movie", + "weight": 10 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "episode", + "weight": 0.8963339782245872 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "actors", + "weight": 0.8860106632957048 + }, + { + "text": "book", + "weight": 0.8820902843985773 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "story", + "weight": 
0.8735464155245114 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "watch", + "weight": 0.8726356920361626 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "picture", + "weight": 0.869302853880071 + }, + { + "text": "performance", + "weight": 0.869120731073309 + }, + { + "text": "books", + "weight": 0.8663417625018435 + }, + { + "text": "music", + "weight": 0.8656983424097792 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": "genre", + "weight": 0.8617874159731083 + }, + { + "text": "show", + "weight": 0.8614962890272202 + }, + { + "text": "topic", + "weight": 0.8604859705769485 + }, + { + "text": "article", + "weight": 0.8604675436012976 + }, + { + "text": "playing", + "weight": 0.8584992674486526 + }, + { + "text": "project", + "weight": 0.8577634602326532 + }, + { + "text": "headline", + "weight": 0.8571046138334414 + }, + { + "text": "venture", + "weight": 0.8569466293500919 + }, + { + "text": "character", + "weight": 0.8761272243977638 + }, + { + "text": "writers", + "weight": 0.8661554668381967 + }, + { + "text": "act", + "weight": 0.8541954594033725 + }, + { + "text": "play", + "weight": 0.8540473424047181 + }, + { + "text": "title", + "weight": 0.8729720674448387 + }, + { + "text": "action", + "weight": 0.8516147580884164 + }, + { + "text": "voice", + "weight": 0.85124096320751 + }, + { + "text": "paper", + "weight": 0.8509334797970916 + }, + { + "text": "work", + "weight": 0.8508204202300914 + }, + { + "text": "youtube", + "weight": 0.8506460674832327 + }, + { + "text": "school", + "weight": 0.8504256546445399 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "author", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "author", + "weight": 10 + }, + { + "text": "authors", + "weight": 0.9228654007193307 + }, + { + "text": "publisher", + "weight": 0.913478213939909 + }, + { + "text": "writer", + 
"weight": 0.9012925304864083 + }, + { + "text": "creator", + "weight": 0.8969427924432541 + }, + { + "text": "character", + "weight": 0.8761272243977638 + }, + { + "text": "actors", + "weight": 0.8749689242950044 + }, + { + "text": "agent", + "weight": 0.8747781326093346 + }, + { + "text": "person", + "weight": 0.8734207447711351 + }, + { + "text": "reader", + "weight": 0.8733242083347119 + }, + { + "text": "title", + "weight": 0.8729720674448387 + }, + { + "text": "article", + "weight": 0.8721107904349 + }, + { + "text": "user", + "weight": 0.8710271933203986 + }, + { + "text": "characters", + "weight": 0.8677032952148568 + }, + { + "text": "writers", + "weight": 0.8661554668381967 + }, + { + "text": "subject", + "weight": 0.8642068647464447 + }, + { + "text": "host", + "weight": 0.8632290789204347 + }, + { + "text": "fiction", + "weight": 0.8625596151799165 + }, + { + "text": "published", + "weight": 0.861507626216192 + }, + { + "text": "individual", + "weight": 0.8613566595145755 + }, + { + "text": "entity", + "weight": 0.8603281762315934 + }, + { + "text": "articles", + "weight": 0.8575854306176928 + }, + { + "text": "guest", + "weight": 0.8553198240743873 + }, + { + "text": "topic", + "weight": 0.8510317626555526 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "List all book titles, movie and author names" + } + ], + "results": [ + { + "messageMatches": [ + 12, + 53, + 5, + 91, + 15, + 6, + 78, + 75, + 32, + 19, + 39, + 25, + 23, + 86, + 27, + 76, + 10, + 85, + 87, + 72, + 38, + 54, + 42, + 52, + 68 + ], + "entityMatches": [ + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 852, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 527, + 567, + 575, + 591, + 610, + 966, + 1216, + 1219, + 1264, + 1330, + 1332, + 1346, + 1350, + 1351, + 1352, + 1353, + 1456, + 1463, + 1478, + 1547, + 1580, + 1566, + 1047, + 1434, + 1360, + 526, + 680, + 687, + 1262, + 1383, + 
1394, + 1395, + 1333, + 943, + 885, + 886, + 1200, + 1465, + 1466, + 1504, + 0, + 1, + 2, + 5, + 6, + 7, + 10, + 11, + 12, + 15, + 16, + 17, + 20, + 21, + 22, + 25, + 26, + 27, + 30, + 31, + 32, + 35, + 36, + 37, + 40, + 41, + 42, + 45, + 46, + 47, + 50, + 51, + 52, + 55, + 56, + 57, + 60, + 61, + 62, + 65, + 66, + 67, + 70, + 71, + 72, + 75, + 76, + 77, + 80, + 81, + 82, + 85, + 86, + 87, + 90, + 91, + 92, + 95, + 96, + 97, + 100, + 101, + 102, + 105, + 106, + 107, + 110, + 111, + 112, + 115, + 116, + 117, + 120, + 121, + 122, + 125, + 126, + 127, + 130, + 131, + 132, + 135, + 136, + 137, + 140, + 141, + 142, + 145, + 146, + 147, + 150, + 151, + 152, + 155, + 156, + 157, + 160, + 161, + 162, + 165, + 166, + 167, + 170, + 171, + 172, + 175, + 176, + 177, + 180, + 181, + 182, + 185, + 186, + 187, + 190, + 191, + 192, + 195, + 196, + 197, + 200, + 201, + 202, + 205, + 206, + 207, + 210, + 211, + 212, + 215, + 216, + 217, + 220, + 221, + 222, + 225, + 226, + 227, + 230, + 231, + 232, + 235, + 236, + 237, + 240, + 241, + 242, + 245, + 246, + 247, + 250, + 251, + 252, + 255, + 256, + 257, + 260, + 261, + 262, + 265, + 266, + 267, + 270, + 271, + 272, + 275, + 276, + 277, + 280, + 281, + 282, + 285, + 286, + 287, + 290, + 291, + 292, + 295, + 296, + 297, + 300, + 301, + 302, + 305, + 306, + 307, + 310, + 311, + 312, + 315, + 316, + 317, + 320, + 321, + 322, + 325, + 326, + 327, + 330, + 331, + 332, + 335, + 336, + 337, + 340, + 341, + 342, + 345, + 346, + 347, + 350, + 351, + 352, + 355, + 356, + 357, + 360, + 361, + 362, + 365, + 366, + 367, + 370, + 371, + 372, + 375, + 376, + 377, + 380, + 381, + 382, + 385, + 386, + 387, + 390, + 391, + 392, + 395, + 396, + 397, + 400, + 401, + 402, + 405, + 406, + 407, + 410, + 411, + 412, + 415, + 416, + 417, + 420, + 421, + 422, + 425, + 426, + 427, + 430, + 431, + 432, + 435, + 436, + 437, + 440, + 441, + 442, + 445, + 446, + 447, + 450, + 451, + 452, + 455, + 456, + 457, + 460, + 461, + 462, + 465, + 466, + 467, + 470, + 471, + 
472, + 475, + 476, + 477, + 480, + 481, + 482, + 485, + 486, + 487, + 490, + 491, + 492, + 495, + 496, + 497, + 500, + 501, + 502, + 505, + 506, + 507, + 510, + 511, + 512, + 515, + 516, + 517, + 520, + 521, + 522, + 544, + 561, + 566, + 597, + 602, + 629, + 641, + 651, + 663, + 708, + 750, + 770, + 801, + 818, + 887, + 920, + 980, + 981, + 982, + 983, + 985, + 1031, + 1032, + 1084, + 1132, + 1249, + 1288, + 1385, + 1407, + 1435, + 1448, + 1516, + 1518, + 1567, + 576, + 1049, + 922, + 706, + 707, + 712, + 688, + 689, + 817, + 1217, + 1355, + 1439, + 1483, + 1507, + 525, + 1146, + 547, + 681, + 819, + 820, + 833, + 834, + 835, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1290, + 1292, + 1315, + 1319, + 1480, + 1107, + 611, + 652, + 653, + 1450, + 1484 + ] + } + ], + "cmd": "@kpSearch --query \"List all book titles,movie and author names\"" + }, + { + "searchText": "List all book titles from the first 15 minutes", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all book titles from the first 15 minutes", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "timeRange": { + "startDate": { + "date": { + "day": 1, + "month": 5, + "year": 2023 + }, + "time": { + "hour": 0, + "minute": 0, + "seconds": 0 + } + }, + "stopDate": { + "date": { + "day": 1, + "month": 5, + "year": 2023 + }, + "time": { + "hour": 0, + "minute": 15, + "seconds": 0 + } + } + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 
0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + } + ] + }, + "when": { + "dateRange": { + "start": "2023-05-01T07:00:00.000Z", + "end": "2023-05-01T07:15:00.000Z" + } + } + } + ], + "rawQuery": "List all book titles from the first 15 minutes" + } + ], + 
"results": [ + { + "messageMatches": [ + 5, + 6, + 12, + 15, + 23, + 25, + 30, + 21, + 16, + 18, + 19 + ], + "entityMatches": [ + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 852, + 712, + 576, + 611, + 652, + 653, + 680, + 687 + ] + } + ], + "cmd": "@kpSearch --query \"List all book titles from the first 15 minutes\"" + }, + { + "searchText": "List all book titles mentioned in the first 15 minutes", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all book titles mentioned in the first 15 minutes", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "mention" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + }, + "timeRange": { + "startDate": { + "date": { + "day": 1, + "month": 5, + "year": 2023 + }, + "time": { + "hour": 0, + "minute": 0, + "seconds": 0 + } + }, + "stopDate": { + "date": { + "day": 1, + "month": 5, + "year": 2023 + }, + "time": { + "hour": 0, + "minute": 15, + "seconds": 0 + } + } + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "mention", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 
0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "dateRange": { + "start": "2023-05-01T07:00:00.000Z", + "end": "2023-05-01T07:15:00.000Z" + } + } + } + ], + "rawQuery": "List all book titles mentioned in the first 15 minutes" + } + ], + "results": [ + { + "messageMatches": [ + 1, + 5, + 6, + 12, + 15, + 23, + 24, + 25, + 26, + 30, + 31, + 22, + 16, + 17, + 27, + 21, + 18, + 19, + 29, + 7, + 20 + ], + "entityMatches": [ + 533, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 769, + 781, + 791, + 
852, + 858, + 738, + 653, + 665, + 751, + 662, + 800, + 712, + 576, + 611, + 661, + 652, + 680, + 687, + 713, + 799, + 801, + 708 + ], + "topicMatches": [ + 541, + 573, + 763, + 788, + 626, + 589, + 862, + 625, + 646, + 812, + 660, + 685, + 540, + 594, + 659, + 676, + 736, + 747, + 761, + 786, + 811, + 596 + ], + "actionMatches": [ + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 772, + 773, + 776, + 793, + 794, + 795, + 740, + 741, + 743, + 744, + 755, + 655, + 657, + 669, + 674, + 759, + 806, + 666, + 671, + 691, + 839, + 592, + 593, + 721, + 729, + 805, + 807, + 702, + 714, + 715 + ] + } + ], + "cmd": "@kpSearch --query \"List all book titles mentioned in the first 15 minutes\"" + }, + { + "searchText": "List all books that are also movies", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all books that are also movies", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book", + "movie" + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "booleanOp": "or_max", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 10 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "next book", + "weight": 
0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "movie", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "movie", + "weight": 10 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "episode", + "weight": 0.8963339782245872 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "actors", + "weight": 0.8860106632957048 + }, + { + "text": "book", + "weight": 0.8820902843985773 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "writer", + "weight": 
0.8733248049247987 + }, + { + "text": "watch", + "weight": 0.8726356920361626 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "picture", + "weight": 0.869302853880071 + }, + { + "text": "performance", + "weight": 0.869120731073309 + }, + { + "text": "books", + "weight": 0.8663417625018435 + }, + { + "text": "music", + "weight": 0.8656983424097792 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": "genre", + "weight": 0.8617874159731083 + }, + { + "text": "show", + "weight": 0.8614962890272202 + }, + { + "text": "topic", + "weight": 0.8604859705769485 + }, + { + "text": "article", + "weight": 0.8604675436012976 + }, + { + "text": "playing", + "weight": 0.8584992674486526 + }, + { + "text": "project", + "weight": 0.8577634602326532 + }, + { + "text": "headline", + "weight": 0.8571046138334414 + }, + { + "text": "venture", + "weight": 0.8569466293500919 + }, + { + "text": "character", + "weight": 0.8564648013190839 + }, + { + "text": "writers", + "weight": 0.8559054574599632 + }, + { + "text": "act", + "weight": 0.8541954594033725 + }, + { + "text": "play", + "weight": 0.8540473424047181 + }, + { + "text": "title", + "weight": 0.8521043786323057 + }, + { + "text": "action", + "weight": 0.8516147580884164 + }, + { + "text": "voice", + "weight": 0.85124096320751 + }, + { + "text": "paper", + "weight": 0.8509334797970916 + }, + { + "text": "work", + "weight": 0.8508204202300914 + }, + { + "text": "youtube", + "weight": 0.8506460674832327 + }, + { + "text": "school", + "weight": 0.8504256546445399 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + ], + "rawQuery": "List all books that are also movies" + } + ], + "results": [ + { + "messageMatches": [ + 15, + 25, + 91, + 81, + 75, + 30, + 6, + 5, + 12, + 19, + 23, + 93, + 16, + 76, + 34, + 86, + 88, + 0, + 51, + 63, + 102, + 94, + 74, + 95, + 36 + ], + "entityMatches": [ + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 
852, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 1566, + 526, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 1333, + 943, + 1504, + 1047, + 1360, + 1434, + 576, + 712, + 688, + 689, + 817, + 1217, + 1355, + 1439, + 1483, + 1507, + 525, + 1049, + 547, + 1107, + 885, + 886, + 1200, + 1465, + 1466, + 611, + 652, + 653, + 1450, + 1484 + ] + } + ], + "cmd": "@kpSearch --query \"List all books that are also movies\"" + }, + { + "searchText": "List all movies", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all movies", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "movie" + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "movie", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "movie", + "weight": 10 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "episode", + "weight": 0.8963339782245872 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "actors", + "weight": 0.8860106632957048 + }, + { + "text": "book", + "weight": 0.8820902843985773 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "watch", + "weight": 0.8726356920361626 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "picture", + "weight": 0.869302853880071 + }, + { + "text": "performance", + "weight": 0.869120731073309 + }, + { + "text": "books", + "weight": 0.8663417625018435 + }, + { + "text": "music", + "weight": 0.8656983424097792 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": 
"genre", + "weight": 0.8617874159731083 + }, + { + "text": "show", + "weight": 0.8614962890272202 + }, + { + "text": "topic", + "weight": 0.8604859705769485 + }, + { + "text": "article", + "weight": 0.8604675436012976 + }, + { + "text": "playing", + "weight": 0.8584992674486526 + }, + { + "text": "project", + "weight": 0.8577634602326532 + }, + { + "text": "headline", + "weight": 0.8571046138334414 + }, + { + "text": "venture", + "weight": 0.8569466293500919 + }, + { + "text": "character", + "weight": 0.8564648013190839 + }, + { + "text": "writers", + "weight": 0.8559054574599632 + }, + { + "text": "act", + "weight": 0.8541954594033725 + }, + { + "text": "play", + "weight": 0.8540473424047181 + }, + { + "text": "title", + "weight": 0.8521043786323057 + }, + { + "text": "action", + "weight": 0.8516147580884164 + }, + { + "text": "voice", + "weight": 0.85124096320751 + }, + { + "text": "paper", + "weight": 0.8509334797970916 + }, + { + "text": "work", + "weight": 0.8508204202300914 + }, + { + "text": "youtube", + "weight": 0.8506460674832327 + }, + { + "text": "school", + "weight": 0.8504256546445399 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "List all movies" + } + ], + "results": [ + { + "messageMatches": [ + 0, + 81, + 19, + 91, + 75, + 12, + 5, + 2, + 51, + 102, + 94, + 15, + 45, + 86, + 6, + 23, + 30, + 76, + 25, + 34, + 79, + 18, + 66, + 93, + 95 + ], + "entityMatches": [ + 1501, + 1566, + 526, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 943, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 852, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1565, + 1504, + 1047, + 1360, + 1434, + 576, + 712, + 688, + 689, + 817, + 1217, + 1355, + 1439, + 1483, + 1507, + 525, + 547, + 1049, + 1107, + 885, + 886, + 1200, + 1465, + 1466, + 611 + ] + } + ], + "cmd": "@kpSearch --query \"List all movies\"" + }, + { + "searchText": "List all books whose names start with Children", + 
"searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all books whose names start with Children", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "Children*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "booleanOp": "or_max", + "terms": [ + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "children*", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children", + "weight": 0.9071445431214166 + }, + { + "text": "child", + "weight": 0.8901168902184042 + }, + { + "text": "kid", + "weight": 0.8776048140506507 + }, + { + "text": "children of time", + "weight": 0.8736395991565727 + }, + { + "text": "children of memory", + "weight": 0.8715659445877454 + }, + { + "text": "childhood", + "weight": 0.8655340628847413 + }, + { + "text": "children of ruin", + "weight": 0.8622970978471337 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + 
}, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + } + ] + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "children*", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children", + "weight": 0.9071445431214166 + }, + { + "text": "child", + "weight": 0.8901168902184042 + }, + { + "text": "kid", + "weight": 0.8776048140506507 + }, + { + "text": "children of time", + "weight": 0.8736395991565727 + }, + { + "text": "children of memory", + "weight": 0.8715659445877454 + }, + { + "text": "childhood", + "weight": 0.8655340628847413 + }, + { + "text": "children of ruin", + "weight": 0.8622970978471337 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "List all books whose names start with Children" + } + ], + "results": [ + { + "messageMatches": [ + 5, + 12, + 30, + 34, + 
36, + 62, + 63, + 14, + 16 + ], + "entityMatches": [ + 569, + 614, + 852, + 882, + 919, + 1198, + 1213, + 1199 + ], + "topicMatches": [ + 857, + 931, + 637, + 658 + ] + } + ], + "cmd": "@kpSearch --query \"List all books whose names start with Children\"" + }, + { + "searchText": "List all books and movies", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all books and movies", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + }, + { + "name": "*", + "isNamePronoun": false, + "type": [ + "movie" + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 10 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + 
{ + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "movie", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "movie", + "weight": 10 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "episode", + "weight": 0.8963339782245872 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "actors", + "weight": 0.8860106632957048 + }, + { + "text": "book", + "weight": 0.8820902843985773 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "watch", + "weight": 0.8726356920361626 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "picture", + "weight": 0.869302853880071 + }, + { + "text": "performance", + "weight": 0.869120731073309 + }, + { + "text": "books", + "weight": 0.8663417625018435 + }, + { + "text": "music", + "weight": 0.8656983424097792 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + 
"text": "genre", + "weight": 0.8617874159731083 + }, + { + "text": "show", + "weight": 0.8614962890272202 + }, + { + "text": "topic", + "weight": 0.8604859705769485 + }, + { + "text": "article", + "weight": 0.8604675436012976 + }, + { + "text": "playing", + "weight": 0.8584992674486526 + }, + { + "text": "project", + "weight": 0.8577634602326532 + }, + { + "text": "headline", + "weight": 0.8571046138334414 + }, + { + "text": "venture", + "weight": 0.8569466293500919 + }, + { + "text": "character", + "weight": 0.8564648013190839 + }, + { + "text": "writers", + "weight": 0.8559054574599632 + }, + { + "text": "act", + "weight": 0.8541954594033725 + }, + { + "text": "play", + "weight": 0.8540473424047181 + }, + { + "text": "title", + "weight": 0.8521043786323057 + }, + { + "text": "action", + "weight": 0.8516147580884164 + }, + { + "text": "voice", + "weight": 0.85124096320751 + }, + { + "text": "paper", + "weight": 0.8509334797970916 + }, + { + "text": "work", + "weight": 0.8508204202300914 + }, + { + "text": "youtube", + "weight": 0.8506460674832327 + }, + { + "text": "school", + "weight": 0.8504256546445399 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "List all books and movies" + } + ], + "results": [ + { + "messageMatches": [ + 81, + 15, + 19, + 25, + 91, + 12, + 5, + 23, + 16, + 6, + 0, + 93, + 75, + 76, + 86, + 88, + 30, + 45, + 102, + 79, + 21, + 63, + 51, + 34, + 95 + ], + "entityMatches": [ + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 852, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 1566, + 526, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 1333, + 943, + 1504, + 1047, + 1360, + 1434, + 576, + 712, + 688, + 689, + 817, + 1217, + 1355, + 1439, + 1483, + 1507, + 525, + 1049, + 547, + 1107, + 885, + 886, + 1200, + 1465, + 1466, + 611, + 652, + 653, + 1450, + 1484 + ] + } + ], + "cmd": "@kpSearch --query \"List all books and movies\"" + }, + { 
+ "searchText": "List all books that are also movies", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all books that are also movies", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book", + "movie" + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "booleanOp": "or_max", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "book", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "book", + "weight": 10 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 10 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + 
"weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "movie", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "movie", + "weight": 10 + }, + { + "text": "movies", + "weight": 10 + }, + { + "text": "episode", + "weight": 0.8963339782245872 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "actors", + "weight": 0.8860106632957048 + }, + { + "text": "book", + "weight": 0.8820902843985773 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "watch", + "weight": 0.8726356920361626 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "picture", + "weight": 0.869302853880071 + }, + { + "text": "performance", + "weight": 0.869120731073309 + }, + { + "text": "books", + "weight": 0.8663417625018435 + }, + { + "text": "music", + "weight": 0.8656983424097792 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": "genre", + "weight": 0.8617874159731083 + }, + { + "text": "show", + "weight": 0.8614962890272202 + }, + { + "text": "topic", + "weight": 0.8604859705769485 + }, + { + "text": "article", + "weight": 0.8604675436012976 + }, + { + "text": "playing", + 
"weight": 0.8584992674486526 + }, + { + "text": "project", + "weight": 0.8577634602326532 + }, + { + "text": "headline", + "weight": 0.8571046138334414 + }, + { + "text": "venture", + "weight": 0.8569466293500919 + }, + { + "text": "character", + "weight": 0.8564648013190839 + }, + { + "text": "writers", + "weight": 0.8559054574599632 + }, + { + "text": "act", + "weight": 0.8541954594033725 + }, + { + "text": "play", + "weight": 0.8540473424047181 + }, + { + "text": "title", + "weight": 0.8521043786323057 + }, + { + "text": "action", + "weight": 0.8516147580884164 + }, + { + "text": "voice", + "weight": 0.85124096320751 + }, + { + "text": "paper", + "weight": 0.8509334797970916 + }, + { + "text": "work", + "weight": 0.8508204202300914 + }, + { + "text": "youtube", + "weight": 0.8506460674832327 + }, + { + "text": "school", + "weight": 0.8504256546445399 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + ], + "rawQuery": "List all books that are also movies" + } + ], + "results": [ + { + "messageMatches": [ + 15, + 25, + 91, + 81, + 75, + 30, + 6, + 5, + 12, + 19, + 23, + 93, + 16, + 76, + 34, + 86, + 88, + 0, + 51, + 63, + 102, + 94, + 74, + 95, + 36 + ], + "entityMatches": [ + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 781, + 852, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 1566, + 526, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 1333, + 943, + 1504, + 1047, + 1360, + 1434, + 576, + 712, + 688, + 689, + 817, + 1217, + 1355, + 1439, + 1483, + 1507, + 525, + 1049, + 547, + 1107, + 885, + 886, + 1200, + 1465, + 1466, + 611, + 652, + 653, + 1450, + 1484 + ] + } + ], + "cmd": "@kpSearch --query \"List all books that are also movies\"" + }, + { + "searchText": "List all books written by Kevin Scott", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all books written by Kevin Scott", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": 
{ + "words": [ + "written" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin Scott", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "written", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "writing", + "weight": 0.9295450101455837 + }, + { + "text": "wrote", + "weight": 0.917550863531442 + }, + { + "text": "be written", + "weight": 0.9171906532909417 + }, + { + "text": "write", + "weight": 0.9162634110857903 + }, + { + "text": "being written", + "weight": 0.9127748414924667 + }, + { + "text": "published", + "weight": 0.9043058431152217 + }, + { + "text": "writer", + "weight": 0.9033704658808606 + }, + { + "text": "be written by", + "weight": 0.898831144088079 + }, + { + "text": "writers", + "weight": 0.8923216031926494 + }, + { + "text": "was written by", + "weight": 0.8886964075895194 + }, + { + "text": "reading", + "weight": 0.8733308246696155 + }, + { + "text": "rewrite", + "weight": 0.872271783571861 + }, + { + "text": "authors", + "weight": 0.8713986470452738 + }, + { + "text": "articles", + "weight": 0.8710761117683715 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "written by ai", + "weight": 0.8662405138279052 + }, + { + "text": "article", + "weight": 0.8655066076755643 + }, + { + "text": "publish", + "weight": 0.863541711845693 + }, + { + "text": "working", + "weight": 0.8626281781921884 + }, + { + "text": "story", + "weight": 0.8623118850256019 + }, + { + "text": "read", + "weight": 0.8605756282983534 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": 
"stories", + "weight": 0.858643778341755 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "documents", + "weight": 0.857753056147378 + }, + { + "text": "talk", + "weight": 0.8548263815701189 + }, + { + "text": "drawing", + "weight": 0.8546976757323729 + }, + { + "text": "comment", + "weight": 0.8542021361120757 + }, + { + "text": "work", + "weight": 0.8534715231667497 + }, + { + "text": "programs writing articles", + "weight": 0.8509581798058102 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin scott", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": 
"human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin 
scott", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "written", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "writing", + "weight": 0.9295450101455837 + }, + { + "text": "wrote", + "weight": 0.917550863531442 + }, + { + "text": "be written", + "weight": 0.9171906532909417 + }, + { + "text": "write", + "weight": 0.9162634110857903 + }, + { + "text": "being written", + "weight": 0.9127748414924667 + }, + { + "text": "published", + "weight": 0.9043058431152217 + }, + { + "text": "writer", + "weight": 0.9033704658808606 + }, + { + "text": "be written by", + "weight": 0.898831144088079 + }, + { + "text": "writers", + "weight": 0.8923216031926494 + }, + { + "text": "was written by", + "weight": 0.8886964075895194 + }, + { + "text": "reading", + "weight": 0.8733308246696155 + }, + { + "text": "rewrite", + "weight": 0.872271783571861 + }, + { + "text": "authors", + "weight": 0.8713986470452738 + }, + { + "text": "articles", + "weight": 0.8710761117683715 + }, + { + "text": "fiction", + "weight": 0.8707586200647693 + }, + { + "text": "paper", + "weight": 0.8707056087084976 + }, + { + "text": "written by ai", + "weight": 0.8662405138279052 + }, + { + "text": "article", + "weight": 0.8655066076755643 + }, + { + "text": "publish", + "weight": 0.863541711845693 + }, + { + "text": "working", + "weight": 0.8626281781921884 + }, + { + "text": "story", + "weight": 0.8623118850256019 + }, + { + "text": "read", + "weight": 0.8605756282983534 + }, + { + "text": "publisher", + "weight": 0.8599203076253362 + }, + { + "text": "stories", + "weight": 0.858643778341755 + }, + { + "text": "author", + "weight": 0.8582928183867743 + }, + { + "text": "documents", + "weight": 0.857753056147378 + }, + { + "text": "talk", + "weight": 0.8548263815701189 + }, + { + "text": "drawing", + "weight": 0.8546976757323729 + }, + { + "text": "comment", + 
"weight": 0.8542021361120757 + }, + { + "text": "work", + "weight": 0.8534715231667497 + }, + { + "text": "programs writing articles", + "weight": 0.8509581798058102 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + } + ], + "rawQuery": "List all books written by Kevin Scott" + } + ], + "results": [ + { + "messageMatches": [ + 2, + 24, + 62, + 66, + 91, + 95, + 16, + 22, + 78, + 99, + 6 + ], + "entityMatches": [ + 5, + 25, + 75, + 105, + 115, + 305, + 325, + 385, + 450, + 470, + 490, + 544, + 1463, + 1199, + 6, + 7, + 26, + 27, + 76, + 77, + 106, + 107, + 116, + 117, + 306, + 307, + 326, + 327, + 386, + 387, + 451, + 452, + 471, + 472, + 491, + 492, + 575, + 651, + 770, + 577, + 769, + 1198, + 1464, + 1468, + 738, + 1545, + 1546, + 1200, + 545, + 576, + 652, + 653, + 1262, + 1465, + 1466 + ], + "topicMatches": [ + 659, + 747, + 1380, + 1559, + 589, + 1381, + 660 + ], + "actionMatches": [ + 549, + 554, + 772, + 548, + 8, + 9, + 28, + 29, + 78, + 79, + 108, + 109, + 118, + 119, + 308, + 309, + 328, + 329, + 388, + 389, + 453, + 454, + 473, + 474, + 493, + 494, + 550, + 553, + 555, + 773, + 776, + 740, + 741, + 743, + 744, + 774, + 775, + 1376, + 1377, + 1378, + 1548, + 1552, + 654, + 655, + 656, + 657, + 739, + 742, + 1201, + 1204, + 1202, + 1205, + 1268, + 1274, + 1269, + 1275 + ] + } + ], + "cmd": "@kpSearch --query \"List all books written by Kevin Scott\"" + }, + { + "searchText": "List what Adrian said about spiders?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List what Adrian said about spiders", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "said" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "spiders", + "isNamePronoun": false, + "type": [ + "animal" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": 
{ + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "said", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "spiders", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "spider", + "weight": 10 + }, + { + "text": "intelligent spiders", + "weight": 0.9186958434301321 + }, + { + "text": "arachnids", + "weight": 0.8993964662478354 + }, + { + "text": "spider senses", + "weight": 0.8868296621689628 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8862866194851886 + }, + { + "text": "spider society", + "weight": 0.8829819603179512 + }, + { + "text": "limited compared to spiders", + "weight": 0.8824617733142769 + }, + { + "text": "insects", + "weight": 0.8730875335452996 + }, + { + "text": "octopus", + "weight": 0.8577486679414105 + }, + { + "text": "arachnophobia", + "weight": 0.851785251504337 + }, + { + "text": "flies", + "weight": 0.8573647386380743 + } + ] + }, + { + "term": { + "text": "animal", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "animal behavior", + "weight": 0.8853139395001849 + }, + { + "text": "human", + "weight": 0.8761103581434334 + }, + { + "text": "species", + "weight": 0.8728613602342458 + }, + { + "text": "anthropomorphic animals", + "weight": 0.870640551417944 + }, + { + "text": "alien", + "weight": 0.8643427139969423 + }, + { + "text": "robot", + "weight": 0.8562761334141211 + }, + { + "text": "artifact", + "weight": 0.855085062113182 + }, + { + "text": "organ", + "weight": 0.8548290946071508 + }, + { + "text": "vehicle", + "weight": 0.854459220485194 + }, + { + "text": "non-human", + "weight": 0.8511867841387388 + }, + { + "text": "zoology", + "weight": 0.8507391772978699 + }, + { + "text": "nature", + "weight": 0.8501104053451894 + } + ] + } + ] 
+ }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "said", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "spiders", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "spiders", + "weight": 10 + }, + { + "text": "spider", + "weight": 10 + }, + { + "text": "intelligent spiders", + "weight": 0.9186958434301321 + }, + { + "text": "arachnids", + "weight": 0.8993964662478354 + }, + { + "text": "spider senses", + "weight": 0.8868296621689628 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8862866194851886 + }, + { + "text": "spider society", + "weight": 0.8829819603179512 + }, + { + "text": "limited compared to spiders", + "weight": 0.8824617733142769 + }, + { + "text": "insects", + "weight": 0.8730875335452996 + }, + { + "text": "octopus", + "weight": 0.8577486679414105 + }, + { + "text": "arachnophobia", + "weight": 0.851785251504337 + }, + { + "text": "flies", + "weight": 0.8515358440242663 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "spiders", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "spiders", + "weight": 10 + }, + { + "text": "spider", + "weight": 10 + }, + { + "text": "intelligent spiders", + "weight": 0.9186958434301321 + }, + { + "text": "arachnids", + "weight": 0.8993964662478354 + }, + { + "text": "spider senses", + "weight": 0.8868296621689628 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8862866194851886 + }, + { + "text": "spider society", + 
"weight": 0.8829819603179512 + }, + { + "text": "limited compared to spiders", + "weight": 0.8824617733142769 + }, + { + "text": "insects", + "weight": 0.8730875335452996 + }, + { + "text": "octopus", + "weight": 0.8577486679414105 + }, + { + "text": "arachnophobia", + "weight": 0.851785251504337 + }, + { + "text": "flies", + "weight": 0.8515358440242663 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "spiders", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "spiders", + "weight": 10 + }, + { + "text": "spider", + "weight": 10 + }, + { + "text": "intelligent spiders", + "weight": 0.9186958434301321 + }, + { + "text": "arachnids", + "weight": 0.8993964662478354 + }, + { + "text": "spider senses", + "weight": 0.8868296621689628 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8862866194851886 + }, + { + "text": "spider society", + "weight": 0.8829819603179512 + }, + { + "text": "limited compared to spiders", + "weight": 0.8824617733142769 + }, + { + "text": "insects", + "weight": 0.8730875335452996 + }, + { + "text": "octopus", + "weight": 0.8577486679414105 + }, + { + "text": "arachnophobia", + "weight": 0.851785251504337 + }, + { + "text": "flies", + "weight": 0.8515358440242663 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "List what Adrian said about spiders" + } + ], + "results": [ + { + "messageMatches": [ + 33, + 35, + 83, + 73, + 21, + 55, + 12 + ], + "entityMatches": [ + 863, + 899, + 1409, + 1316, + 710, + 709, + 1408, + 1317, + 1129, + 864, + 56, + 100, + 160, + 170, + 270, + 360, + 410, + 610, + 900, + 1130, + 1318, + 1410, + 1319, + 707, + 1407, + 1315 + ], + "topicMatches": [ + 1416, + 877, + 731, + 1327, + 878, + 624, + 1415, + 1417, + 1141, + 911, + 622, + 733 + ], + "actionMatches": [ + 58, + 103, + 104, + 163, + 164, + 173, + 174, + 273, + 274, + 363, + 364, + 413, + 414, + 618, + 619, + 865, + 866, + 
867, + 869, + 871, + 872, + 873, + 875, + 901, + 902, + 903, + 906, + 907, + 908, + 1320, + 1322, + 717, + 725, + 1411, + 716, + 724, + 1412, + 905, + 910, + 1134, + 1137, + 1321, + 1323, + 715, + 723 + ] + } + ], + "cmd": "@kpSearch --query \"List what Adrian said about spiders?\"" + }, + { + "searchText": "Summarize the discussion on science fiction", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Summarize the discussion on science fiction", + "filters": [ + { + "searchTerms": [ + "science fiction" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "term": { + "text": "science fiction", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "sci-fi", + "weight": 10 + }, + { + "text": "science fiction writing", + "weight": 10 + }, + { + "text": "science fiction novels", + "weight": 0.9422651353254354 + }, + { + "text": "science fiction books", + "weight": 0.9396698039206083 + }, + { + "text": "science fictional", + "weight": 0.9340857637080534 + }, + { + "text": "sci-fi novels", + "weight": 0.9331641373451495 + }, + { + "text": "science fiction community", + "weight": 0.928536465467253 + }, + { + "text": "science fiction writer", + "weight": 0.9284015544635523 + }, + { + "text": "science fiction ai", + "weight": 0.9262565598248034 + }, + { + "text": "science fiction writers", + "weight": 0.9202279697590041 + }, + { + "text": "science fiction writers and philosophers", + "weight": 0.9087970109745632 + }, + { + "text": "science fiction authors", + "weight": 0.9021820353648353 + }, + { + "text": "dystopian take in sci-fi", + "weight": 0.8992677718249061 + }, + { + "text": "sci-fi authors", + "weight": 0.8961275837836871 + }, + { + "text": "future of science fiction books", + "weight": 0.8960873493512133 + }, + { + "text": "fiction", + "weight": 0.8839923563906091 + }, + { + "text": "fantasy", + "weight": 0.8828528948789636 + }, + { + "text": 
"fantasy novels", + "weight": 0.8772314584505346 + }, + { + "text": "uk science fiction", + "weight": 0.875234378010223 + }, + { + "text": "distinctive voice in science fiction", + "weight": 0.8734939847188246 + }, + { + "text": "balanced view in sci-fi", + "weight": 0.872234113161149 + }, + { + "text": "dystopian stories", + "weight": 0.8713811080437744 + }, + { + "text": "imagination in future technology", + "weight": 0.8713194770382958 + }, + { + "text": "fictional_universe", + "weight": 0.8689978353988699 + }, + { + "text": "dystopian", + "weight": 0.8662266342141208 + }, + { + "text": "star trek", + "weight": 0.8651278341306295 + }, + { + "text": "cyberpunk", + "weight": 0.8644736599263693 + }, + { + "text": "dystopia", + "weight": 0.8615029326962946 + }, + { + "text": "asimov", + "weight": 0.8563199082887123 + }, + { + "text": "dystopian themes", + "weight": 0.8509862002809068 + } + ] + } + ] + } + } + ], + "rawQuery": "Summarize the discussion on science fiction" + } + ], + "results": [ + { + "messageMatches": [ + 19, + 28, + 76, + 78, + 86, + 93, + 96, + 6, + 5, + 12, + 66, + 67, + 74, + 95, + 45, + 89, + 16, + 88, + 14, + 84, + 63, + 91, + 59, + 99, + 15 + ], + "entityMatches": [ + 689, + 817, + 1355, + 1374, + 1439, + 1483, + 1517, + 575, + 567, + 1484, + 1507, + 1450, + 630, + 1047, + 1421, + 1456, + 653, + 688, + 1504, + 1479, + 1219, + 640, + 1465 + ], + "topicMatches": [ + 586, + 620, + 1240, + 1284, + 1300, + 1340, + 1382, + 1431, + 1447, + 1471, + 1497, + 572, + 1455, + 638, + 700, + 1515, + 1359, + 1173, + 1055, + 1357, + 1525, + 1499, + 1559, + 621, + 650, + 699, + 1356, + 830, + 1524, + 1512, + 1527, + 1561, + 1476, + 1241, + 1500 + ], + "actionMatches": [ + 581, + 582, + 583, + 692, + 1487, + 1375, + 1376, + 1377, + 1378, + 1489, + 1490, + 1511, + 1451, + 1452, + 1050, + 1225, + 1232, + 1422, + 1423, + 1424, + 1425, + 1426, + 1427, + 1457, + 1458, + 655, + 657, + 1551, + 1555, + 1508 + ] + } + ], + "cmd": "@kpSearch --query \"Summarize the 
discussion on science fiction\"" + }, + { + "searchText": "Summarize what Adrian said about spiders?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Summarize what Adrian said about spiders", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "said" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "spiders", + "isNamePronoun": false, + "type": [ + "animal" + ] + } + ], + "isInformational": false + }, + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "said", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "spiders", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "spider", + "weight": 10 + }, + { + "text": "intelligent spiders", + "weight": 0.9186958434301321 + }, + { + "text": "arachnids", + "weight": 0.8993964662478354 + }, + { + "text": "spider senses", + "weight": 0.8868296621689628 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8862866194851886 + }, + { + "text": "spider society", + "weight": 0.8829819603179512 + }, + { + "text": "limited compared to spiders", + "weight": 0.8824617733142769 + }, + { + "text": "insects", + "weight": 0.8730875335452996 + }, + { + "text": "octopus", + "weight": 0.8577486679414105 + }, + { + "text": "arachnophobia", + "weight": 0.851785251504337 + }, + { + "text": "flies", + "weight": 0.8573647386380743 + } + ] + }, + { + "term": { + "text": "animal", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "animal behavior", + "weight": 0.8853139395001849 + }, + { + "text": 
"human", + "weight": 0.8761103581434334 + }, + { + "text": "species", + "weight": 0.8728613602342458 + }, + { + "text": "anthropomorphic animals", + "weight": 0.870640551417944 + }, + { + "text": "alien", + "weight": 0.8643427139969423 + }, + { + "text": "robot", + "weight": 0.8562761334141211 + }, + { + "text": "artifact", + "weight": 0.855085062113182 + }, + { + "text": "organ", + "weight": 0.8548290946071508 + }, + { + "text": "vehicle", + "weight": 0.854459220485194 + }, + { + "text": "non-human", + "weight": 0.8511867841387388 + }, + { + "text": "zoology", + "weight": 0.8507391772978699 + }, + { + "text": "nature", + "weight": 0.8501104053451894 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "said", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "spiders", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "spiders", + "weight": 10 + }, + { + "text": "spider", + "weight": 10 + }, + { + "text": "intelligent spiders", + "weight": 0.9186958434301321 + }, + { + "text": "arachnids", + "weight": 0.8993964662478354 + }, + { + "text": "spider senses", + "weight": 0.8868296621689628 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8862866194851886 + }, + { + "text": "spider society", + "weight": 0.8829819603179512 + }, + { + "text": "limited compared to spiders", + "weight": 0.8824617733142769 + }, + { + "text": "insects", + "weight": 0.8730875335452996 + }, + { + "text": "octopus", + "weight": 0.8577486679414105 + }, + { + "text": "arachnophobia", + 
"weight": 0.851785251504337 + }, + { + "text": "flies", + "weight": 0.8515358440242663 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "spiders", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "spiders", + "weight": 10 + }, + { + "text": "spider", + "weight": 10 + }, + { + "text": "intelligent spiders", + "weight": 0.9186958434301321 + }, + { + "text": "arachnids", + "weight": 0.8993964662478354 + }, + { + "text": "spider senses", + "weight": 0.8868296621689628 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8862866194851886 + }, + { + "text": "spider society", + "weight": 0.8829819603179512 + }, + { + "text": "limited compared to spiders", + "weight": 0.8824617733142769 + }, + { + "text": "insects", + "weight": 0.8730875335452996 + }, + { + "text": "octopus", + "weight": 0.8577486679414105 + }, + { + "text": "arachnophobia", + "weight": 0.851785251504337 + }, + { + "text": "flies", + "weight": 0.8515358440242663 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "spiders", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "spiders", + "weight": 10 + }, + { + "text": "spider", + "weight": 10 + }, + { + "text": "intelligent spiders", + "weight": 0.9186958434301321 + }, + { + "text": "arachnids", + "weight": 0.8993964662478354 + }, + { + "text": "spider senses", + "weight": 0.8868296621689628 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8862866194851886 + }, + { + "text": "spider society", + "weight": 0.8829819603179512 + }, + { + "text": "limited compared to spiders", + "weight": 0.8824617733142769 + }, + { + "text": "insects", + "weight": 0.8730875335452996 + }, + { + "text": "octopus", + "weight": 0.8577486679414105 + }, + { + "text": "arachnophobia", + "weight": 0.851785251504337 + }, + { + "text": "flies", + "weight": 0.8515358440242663 + } + ], + 
"relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "Summarize what Adrian said about spiders" + } + ], + "results": [ + { + "messageMatches": [ + 33, + 35, + 83, + 73, + 21, + 55, + 12 + ], + "entityMatches": [ + 863, + 899, + 1409, + 1316, + 710, + 709, + 1408, + 1317, + 1129, + 864, + 56, + 100, + 160, + 170, + 270, + 360, + 410, + 610, + 900, + 1130, + 1318, + 1410, + 1319, + 707, + 1407, + 1315 + ], + "topicMatches": [ + 1416, + 877, + 731, + 1327, + 878, + 624, + 1415, + 1417, + 1141, + 911, + 622, + 733 + ], + "actionMatches": [ + 58, + 103, + 104, + 163, + 164, + 173, + 174, + 273, + 274, + 363, + 364, + 413, + 414, + 618, + 619, + 865, + 866, + 867, + 869, + 871, + 872, + 873, + 875, + 901, + 902, + 903, + 906, + 907, + 908, + 1320, + 1322, + 717, + 725, + 1411, + 716, + 724, + 1412, + 905, + 910, + 1134, + 1137, + 1321, + 1323, + 715, + 723 + ] + } + ], + "cmd": "@kpSearch --query \"Summarize what Adrian said about spiders?\"" + }, + { + "searchText": "When did we talk about Children of Time?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "When did we talk about Children of Time?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "talk" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "we", + "isNamePronoun": true + } + ], + "targetEntities": [ + { + "name": "Children of Time", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "children of time", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "children of time characters", + "weight": 0.9408854728278943 + }, + { + 
"text": "children of memory", + "weight": 0.9246831036693335 + }, + { + "text": "children of ruin", + "weight": 0.8973242305013864 + }, + { + "text": "shadows of the apt and children of time", + "weight": 0.8836635190462324 + }, + { + "text": "children", + "weight": 0.8594072764766695 + }, + { + "text": "children of ruin entity", + "weight": 0.8579949057378614 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": 
"game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "children of time", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children of time", + "weight": 10 + }, + { + "text": "children of time characters", + "weight": 0.9408854728278943 + }, + { + "text": "children of memory", + "weight": 0.9246831036693335 + }, + { + "text": "children of ruin", + "weight": 0.8973242305013864 + }, + { + "text": "shadows of the apt and children of time", + "weight": 0.8836635190462324 + }, + { + "text": "children", + "weight": 0.8594072764766695 + }, + { + "text": "children of ruin entity", + "weight": 0.8579949057378614 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "children of time", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children of time", + "weight": 10 + }, + { + "text": "children of time characters", + "weight": 0.9408854728278943 + }, + { + "text": "children of memory", + "weight": 0.9246831036693335 + }, + { + "text": "children of ruin", + "weight": 0.8973242305013864 + }, + { + "text": "shadows of the apt and children of time", + "weight": 0.8836635190462324 + }, + { + "text": "children", + "weight": 0.8594072764766695 + }, + { + "text": "children 
of ruin entity", + "weight": 0.8579949057378614 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "children of time", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "children of time", + "weight": 10 + }, + { + "text": "children of time characters", + "weight": 0.9408854728278943 + }, + { + "text": "children of memory", + "weight": 0.9246831036693335 + }, + { + "text": "children of ruin", + "weight": 0.8973242305013864 + }, + { + "text": "shadows of the apt and children of time", + "weight": 0.8836635190462324 + }, + { + "text": "children", + "weight": 0.8594072764766695 + }, + { + "text": "children of ruin entity", + "weight": 0.8579949057378614 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "When did we talk about Children of Time?" + } + ], + "results": [ + { + "messageMatches": [ + 5, + 12, + 30, + 34, + 36, + 62, + 63, + 17 + ], + "entityMatches": [ + 569, + 614, + 852, + 882, + 919, + 1198, + 1213, + 1199, + 568, + 612, + 613, + 615, + 616, + 883, + 665, + 662, + 887, + 611, + 661, + 567 + ], + "topicMatches": [ + 857, + 931, + 573, + 1208, + 626, + 898, + 625, + 1209, + 676 + ], + "actionMatches": [ + 890, + 893, + 888, + 891, + 1221, + 1228, + 669, + 674, + 889, + 892, + 667, + 672, + 666, + 671 + ] + } + ], + "cmd": "@kpSearch --query \"When did we talk about Children of Time?\"" + }, + { + "searchText": "When did we talk about Profiles of the Future?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "When did we talk about Profiles of the Future?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "talk" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "we", + "isNamePronoun": true + } + ], + "targetEntities": [ + { + "name": "Profiles of the Future", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + 
"compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 
0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ 
+ { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "When did we talk about Profiles of the Future?" + } + ], + "results": [ + { + "messageMatches": [ + 91, + 45, + 44, + 88, + 101 + ], + "entityMatches": [ + 1464, + 1468, + 1463, + 1047, + 1049, + 1033, + 1450, + 1465, + 1466 + ], + "topicMatches": [ + 1046, + 1060, + 1473, + 1562, + 1453 + ], + "actionMatches": [ + 1469, + 1470 + ] + } + ], + "cmd": "@kpSearch --query \"When did we talk about Profiles of the Future?\"" + }, + { + "searchText": "Who mentioned Profiles of the Future and at what time?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who mentioned Profiles of the Future and at what time?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "mentioned" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "Profiles of the Future", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "mentioned", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { 
+ "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + } + } + ], + "rawQuery": "Who mentioned 
Profiles of the Future and at what time?" + } + ], + "results": [ + { + "messageMatches": [ + 91, + 88, + 86, + 12, + 101, + 66, + 44, + 5, + 16, + 95, + 45, + 63, + 93, + 15, + 19, + 96, + 74, + 6, + 34, + 22, + 99, + 25, + 20, + 36, + 17 + ], + "entityMatches": [ + 1464, + 533, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 769, + 781, + 791, + 852, + 858, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1468, + 1501, + 1565, + 738, + 1463, + 653, + 665, + 1333, + 1566, + 751, + 662, + 1047, + 1360, + 1434, + 887, + 1504, + 1049, + 800, + 712, + 1033, + 576, + 611, + 661, + 1081, + 652, + 1450, + 1484, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 713, + 799, + 801, + 1518, + 1465, + 1466, + 1061, + 708 + ], + "topicMatches": [ + 541, + 573, + 763, + 788, + 1559, + 626, + 589, + 1046, + 1060, + 1473, + 862, + 898, + 625, + 646, + 812, + 1381, + 1562, + 660, + 1348, + 1453, + 685, + 1577, + 540, + 594, + 659, + 676, + 736, + 747, + 761, + 786, + 811, + 1380, + 1443, + 596 + ], + "actionMatches": [ + 1469, + 1470, + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 772, + 773, + 776, + 793, + 794, + 795, + 740, + 741, + 743, + 744, + 755, + 655, + 657, + 1551, + 1555, + 669, + 674, + 759, + 889, + 890, + 892, + 893, + 903, + 908, + 806, + 666, + 671, + 1088, + 1089, + 1094, + 1095, + 1440, + 691, + 1309, + 839, + 1570, + 1571, + 1574, + 1575, + 592, + 593, + 721, + 729, + 805, + 807, + 1521, + 1523, + 702, + 714, + 715 + ] + } + ], + "cmd": "@kpSearch --query \"Who mentioned Profiles of the Future and at what time?\"" + }, + { + "searchText": "What did Adrian say about Children of Memory?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What did Adrian say about Children of Memory?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "say" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + 
{ + "name": "Children of Memory", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "children of memory", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "children of time", + "weight": 0.9246831036693335 + }, + { + "text": "children of ruin", + "weight": 0.8880911639516562 + }, + { + "text": "children of time characters", + "weight": 0.8619465765599191 + }, + { + "text": "children", + "weight": 0.8588892013957266 + }, + { + "text": "childhood", + "weight": 0.8527073163182464 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": 
"paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": 
"adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "children of memory", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children of memory", + "weight": 10 + }, + { + "text": "children of time", + "weight": 0.9246831036693335 + }, + { + "text": "children of ruin", + "weight": 0.8880911639516562 + }, + { + "text": "children of time characters", + "weight": 0.8619465765599191 + }, + { + "text": "children", + "weight": 0.8588892013957266 + }, + { + "text": "childhood", + "weight": 0.8527073163182464 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "children of memory", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children of memory", + "weight": 10 + }, + { + "text": "children of time", + "weight": 0.9246831036693335 + }, + { + "text": "children of ruin", + "weight": 0.8880911639516562 + }, + { + "text": "children of time characters", + "weight": 
0.8619465765599191 + }, + { + "text": "children", + "weight": 0.8588892013957266 + }, + { + "text": "childhood", + "weight": 0.8527073163182464 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "children of memory", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "children of memory", + "weight": 10 + }, + { + "text": "children of time", + "weight": 0.9246831036693335 + }, + { + "text": "children of ruin", + "weight": 0.8880911639516562 + }, + { + "text": "children of time characters", + "weight": 0.8619465765599191 + }, + { + "text": "children", + "weight": 0.8588892013957266 + }, + { + "text": "childhood", + "weight": 0.8527073163182464 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What did Adrian say about Children of Memory?" + } + ], + "results": [ + { + "messageMatches": [ + 63, + 5, + 12, + 17 + ], + "entityMatches": [ + 1213, + 569, + 614, + 21, + 56, + 80, + 310, + 567, + 610, + 568, + 612, + 613, + 615, + 616, + 665, + 662, + 611, + 661 + ], + "topicMatches": [ + 573, + 626, + 625, + 676 + ], + "actionMatches": [ + 23, + 58, + 83, + 84, + 313, + 314, + 570, + 571, + 618, + 619, + 1221, + 1228, + 669, + 674, + 667, + 672, + 666, + 671 + ] + } + ], + "cmd": "@kpSearch --query \"What did Adrian say about Children of Memory?\"" + }, + { + "searchText": "How did Asimov get referenced?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "How did Asimov get referenced?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "referenced" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "Asimov", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + 
"propertyValue": { + "term": { + "text": "referenced", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "mention", + "weight": 0.856912532560317 + }, + { + "text": "inspired", + "weight": 0.8529226866012234 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "asimov", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "science fiction writers and philosophers", + "weight": 0.8738394190915796 + }, + { + "text": "science fiction writer", + "weight": 0.8696899103119655 + }, + { + "text": "sci-fi authors", + "weight": 0.8692697169774936 + }, + { + "text": "sci-fi novels", + "weight": 0.8615138039342004 + }, + { + "text": "science fiction writers", + "weight": 0.8598490877039751 + }, + { + "text": "science fiction novels", + "weight": 0.8583504237904256 + }, + { + "text": "science fiction authors", + "weight": 0.8580172264879324 + }, + { + "text": "science fiction", + "weight": 0.8563199082887123 + }, + { + "text": "skynet", + "weight": 0.85422490355617 + }, + { + "text": "science fiction books", + "weight": 0.8530140422076291 + }, + { + "text": "science fiction writing", + "weight": 0.851589231753121 + }, + { + "text": "sci-fi", + "weight": 0.8503083885021027 + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + 
"weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + } + ] + } + } + ], + "rawQuery": "How did Asimov get referenced?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 91, + 88, + 28, + 63, + 74, + 14, + 93, + 86, + 76, + 36, + 59, + 66, + 62, + 12, + 40, + 34, + 45, + 53, + 84, + 30, + 6, + 89, + 60, + 51, + 67 + ], + "entityMatches": [ + 1219, + 1456, + 1047, + 575, + 567, + 0, + 1, + 2, + 5, + 6, + 7, + 10, + 11, + 12, + 15, + 16, + 17, + 20, + 21, + 22, + 25, + 26, + 27, + 30, + 31, + 32, + 35, + 36, + 37, + 40, + 41, + 42, + 45, + 46, + 47, + 50, + 51, + 52, + 55, + 56, + 57, + 60, + 61, + 62, + 65, + 66, + 67, + 70, + 71, + 72, + 75, + 76, + 77, + 80, + 81, + 82, + 85, + 86, + 87, + 90, + 91, + 92, + 95, + 96, + 97, + 100, + 101, + 102, + 105, + 106, + 107, + 110, + 111, + 112, + 115, + 116, + 117, + 120, + 121, + 122, + 125, + 126, + 127, + 130, + 131, + 132, + 135, + 136, + 137, + 140, + 141, + 142, + 145, + 146, + 147, + 150, + 151, + 152, + 155, + 156, + 157, + 160, + 161, + 162, + 165, + 166, + 167, + 170, + 171, + 172, + 175, + 176, + 177, + 180, + 181, + 182, + 185, + 186, + 187, + 190, + 191, + 192, + 195, + 196, + 197, + 200, + 201, + 202, + 205, + 206, + 207, + 210, + 211, + 212, + 215, + 216, + 217, + 220, + 221, + 222, + 225, + 226, + 227, + 230, + 231, + 232, + 235, + 236, + 237, + 240, + 241, + 242, + 245, + 246, + 247, + 250, + 251, + 252, + 255, + 256, + 257, + 260, + 261, + 262, + 265, + 266, + 267, + 270, + 271, + 272, + 275, + 276, + 277, + 280, + 281, + 282, + 285, + 286, + 287, + 290, + 291, + 292, + 295, + 296, + 297, + 300, + 301, + 302, + 305, + 306, + 307, + 310, + 311, + 312, + 315, + 316, + 317, + 320, + 321, + 322, + 325, + 326, + 327, + 330, + 331, + 332, + 335, + 336, + 337, + 340, + 341, + 342, + 345, + 346, + 347, + 350, + 351, + 352, + 355, + 356, + 357, + 360, + 361, + 362, + 365, + 366, + 367, + 370, + 371, + 372, + 375, + 376, + 377, + 380, + 381, + 382, + 385, + 386, + 387, + 390, + 391, + 392, + 395, + 396, + 397, + 400, + 401, + 402, + 405, + 406, + 407, + 410, + 411, + 412, + 415, + 416, + 417, + 420, + 421, + 422, + 425, + 426, + 
427, + 430, + 431, + 432, + 435, + 436, + 437, + 440, + 441, + 442, + 445, + 446, + 447, + 450, + 451, + 452, + 455, + 456, + 457, + 460, + 461, + 462, + 465, + 466, + 467, + 470, + 471, + 472, + 475, + 476, + 477, + 480, + 481, + 482, + 485, + 486, + 487, + 490, + 491, + 492, + 495, + 496, + 497, + 500, + 501, + 502, + 505, + 506, + 507, + 510, + 511, + 512, + 515, + 516, + 517, + 520, + 521, + 522, + 527, + 544, + 561, + 566, + 591, + 597, + 602, + 610, + 629, + 641, + 651, + 663, + 708, + 750, + 770, + 801, + 818, + 887, + 920, + 980, + 981, + 982, + 983, + 985, + 1031, + 1032, + 1084, + 1132, + 1249, + 1288, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1385, + 1407, + 1434, + 1435, + 1448, + 1463, + 1478, + 1516, + 1518, + 1567, + 1580, + 1290, + 1146, + 1335, + 1384, + 706, + 707, + 966, + 1029, + 1033, + 1066, + 1545, + 1546, + 886, + 1216, + 1264, + 1346, + 1547, + 835, + 885, + 1200, + 1465, + 1466, + 681, + 819, + 820, + 833, + 834, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1292, + 1315, + 1319, + 1480, + 630, + 662, + 1360, + 545, + 563, + 1113, + 1582, + 1450, + 1049, + 1421, + 859, + 689, + 817, + 1355, + 1374, + 1439, + 1483, + 1301, + 1307, + 751, + 1484, + 984, + 1005, + 864, + 576, + 1464, + 1199, + 1004, + 1174, + 1517 + ], + "topicMatches": [ + 1391, + 1414, + 897, + 1055, + 700, + 1357, + 586, + 620, + 1240, + 1284, + 1300, + 1340, + 1382, + 1431, + 1447, + 1471, + 1497, + 1297, + 1304, + 638, + 898, + 572, + 1455 + ], + "actionMatches": [ + 903, + 908, + 929, + 1007, + 1009, + 1010, + 1012, + 1376, + 1377, + 1378, + 1309, + 549, + 554, + 772, + 774, + 775, + 1149, + 1151, + 1387, + 1388, + 1411, + 1548, + 1552, + 843, + 1227, + 1234, + 1321, + 1323, + 1068, + 1070, + 1294, + 1295, + 889, + 892, + 1096, + 1225, + 1232, + 642, + 643, + 644, + 645, + 822, + 824, + 826, + 828, + 1457, + 1458, + 992, + 993, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 654, + 655, + 656, + 657, + 1119, + 1123, + 1451, + 1452, + 682, + 683, + 739, 
+ 740, + 741, + 742, + 743, + 744, + 840, + 841, + 844, + 1116, + 1117, + 1120, + 1121, + 1201, + 1204, + 1488, + 1520, + 1522, + 1050, + 888, + 904, + 1422, + 1423, + 1424, + 1425, + 1426, + 1427, + 1202, + 1205, + 1268, + 1274, + 581, + 582, + 583, + 692, + 1487, + 1087, + 1269, + 1275, + 1293, + 1302, + 1303, + 1308, + 1310, + 1311, + 759, + 1375, + 1489, + 1490, + 758, + 760, + 890, + 893, + 592, + 593, + 823, + 827, + 868, + 870, + 874, + 876, + 905, + 910, + 1178, + 1182, + 1485, + 548 + ] + } + ], + "cmd": "@kpSearch --query \"How did Asimov get referenced?\"" + }, + { + "searchText": "Who said that Adrian studied at the 'University of Reading'?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who said that Adrian studied at the 'University of Reading'?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "said" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "additionalEntities": [ + { + "name": "University of Reading", + "isNamePronoun": false, + "type": [ + "school" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "said", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + 
"weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "university of reading", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "university", + "weight": 0.8752718178483946 + } + ] + }, + { + "term": { + "text": "school", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "study", + "weight": 
0.9079925379332335 + }, + { + "text": "education", + "weight": 0.9073188976975806 + }, + { + "text": "institution", + "weight": 0.8958646384053154 + }, + { + "text": "job", + "weight": 0.8804345211992621 + }, + { + "text": "graduate", + "weight": 0.8780244195990468 + }, + { + "text": "work", + "weight": 0.8738208296183968 + }, + { + "text": "career", + "weight": 0.8727402462225125 + }, + { + "text": "learning", + "weight": 0.8691472487720716 + }, + { + "text": "working", + "weight": 0.8664186438253275 + }, + { + "text": "profession", + "weight": 0.8654285994731649 + }, + { + "text": "law", + "weight": 0.8606680437605202 + }, + { + "text": "books", + "weight": 0.8600765431827375 + }, + { + "text": "sport", + "weight": 0.8599803023419524 + }, + { + "text": "children", + "weight": 0.8594252432411279 + }, + { + "text": "parents", + "weight": 0.8593138918912956 + }, + { + "text": "paper", + "weight": 0.8589184781551995 + }, + { + "text": "research", + "weight": 0.8587338352423608 + }, + { + "text": "service", + "weight": 0.8578897264991692 + }, + { + "text": "book", + "weight": 0.8563492536817514 + }, + { + "text": "practice", + "weight": 0.8558217742345485 + }, + { + "text": "learn", + "weight": 0.8543457864506536 + }, + { + "text": "sites", + "weight": 0.8540818814908036 + }, + { + "text": "system", + "weight": 0.8531128979826174 + }, + { + "text": "game", + "weight": 0.853012910117892 + }, + { + "text": "task", + "weight": 0.8529735499426325 + }, + { + "text": "industry", + "weight": 0.8529388735994328 + }, + { + "text": "technology", + "weight": 0.8526962808748119 + }, + { + "text": "movie", + "weight": 0.8504256546445399 + } + ] + } + ] + } + } + ], + "rawQuery": "Who said that Adrian studied at the 'University of Reading'?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 12, + 20, + 86, + 6, + 11, + 5, + 75, + 76, + 21, + 22, + 15, + 27, + 91, + 88, + 0, + 16, + 25, + 64, + 99, + 10, + 92, + 90, + 98, + 19, + 39 + ], + "entityMatches": [ + 575, + 602, + 1448, + 1516, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1434, + 1580, + 611, + 1547, + 663, + 708, + 641, + 1463, + 1, + 2, + 5, + 7, + 10, + 12, + 15, + 17, + 20, + 22, + 25, + 27, + 30, + 32, + 35, + 37, + 40, + 42, + 45, + 47, + 50, + 52, + 55, + 57, + 61, + 62, + 65, + 67, + 71, + 72, + 75, + 77, + 81, + 82, + 85, + 87, + 91, + 92, + 95, + 97, + 101, + 102, + 105, + 107, + 111, + 112, + 115, + 117, + 121, + 122, + 125, + 127, + 131, + 132, + 135, + 137, + 141, + 142, + 145, + 147, + 151, + 152, + 155, + 157, + 161, + 162, + 165, + 167, + 171, + 172, + 175, + 177, + 181, + 182, + 185, + 187, + 191, + 192, + 195, + 197, + 201, + 202, + 205, + 207, + 211, + 212, + 215, + 217, + 221, + 222, + 225, + 227, + 231, + 232, + 235, + 237, + 241, + 242, + 245, + 247, + 251, + 252, + 255, + 257, + 261, + 262, + 265, + 267, + 271, + 272, + 275, + 277, + 281, + 282, + 285, + 287, + 291, + 292, + 295, + 297, + 301, + 302, + 305, + 307, + 311, + 312, + 315, + 317, + 321, + 322, + 325, + 327, + 331, + 332, + 335, + 337, + 341, + 342, + 345, + 347, + 
351, + 352, + 355, + 357, + 361, + 362, + 365, + 367, + 371, + 372, + 375, + 377, + 381, + 382, + 385, + 387, + 391, + 392, + 395, + 397, + 401, + 402, + 405, + 407, + 411, + 412, + 415, + 417, + 421, + 422, + 425, + 427, + 430, + 432, + 435, + 437, + 440, + 442, + 445, + 447, + 450, + 452, + 455, + 457, + 460, + 462, + 465, + 467, + 470, + 472, + 475, + 477, + 480, + 482, + 485, + 487, + 490, + 492, + 495, + 497, + 500, + 502, + 505, + 507, + 510, + 512, + 515, + 517, + 520, + 522, + 544, + 561, + 566, + 591, + 597, + 629, + 651, + 750, + 770, + 801, + 818, + 887, + 920, + 980, + 981, + 982, + 985, + 1031, + 1032, + 1047, + 1084, + 1132, + 1249, + 1288, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1385, + 1407, + 1435, + 1456, + 1478, + 1518, + 661, + 576, + 701, + 1033, + 751, + 662, + 1289, + 1290, + 1292, + 1315, + 1319, + 1480, + 1146, + 1335, + 1384, + 790, + 706, + 707, + 966, + 1029, + 1066, + 1545, + 1546, + 712, + 886, + 1216, + 1219, + 1264, + 1346, + 630, + 711, + 835, + 885, + 1200, + 1465, + 1466, + 681, + 819, + 820, + 833, + 834, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1360, + 737, + 1419, + 1420, + 1421, + 545, + 563, + 1113, + 1582, + 738, + 1021, + 1049, + 665, + 1245, + 859, + 533, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 769, + 781, + 791, + 852, + 858, + 882, + 883, + 919, + 1198, + 1213, + 1331, + 1345, + 1468, + 1501, + 1565, + 1083, + 1145, + 1131, + 821, + 1030, + 1176, + 1214, + 1215, + 1220, + 1248, + 1301, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 782, + 984, + 1005, + 923, + 1028, + 1115, + 1144, + 1436, + 1437, + 1449, + 1482, + 1528, + 1529, + 1530, + 864, + 1566, + 1464, + 1199, + 1004, + 1065, + 1061, + 1106, + 1174 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595, + 1391, + 1414, + 678, + 704, + 730, + 897, + 636, + 705, + 745, + 779, + 735, + 541, + 573, + 763, + 788, + 780, + 685, + 530, + 1040, + 1454, + 898 + ], + "actionMatches": [ + 1520, + 1522, + 581, + 582, + 
583, + 604, + 606, + 1550, + 1551, + 1554, + 1555, + 1571, + 1575, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 204, + 208, + 213, + 214, + 218, + 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1440, + 1584, + 1589, + 903, + 908, + 929, + 1007, + 1009, + 1010, + 1012, + 666, + 671, + 702, + 772, + 775, + 774, + 739, + 740, + 741, + 743, + 744, + 1268, + 1274, + 714, + 715, + 549, + 554, + 1149, + 1151, + 1376, + 1377, + 1378, + 1387, + 1388, + 1411, + 1548, + 1552, + 843, + 1227, + 1234, + 1321, + 1323, + 1068, + 1070, + 1294, + 1295, + 889, + 892, + 1096, + 703, + 839, + 642, + 643, + 644, + 645, + 822, + 824, + 826, + 828, + 1309, + 992, + 993, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 654, + 655, + 656, + 657, + 1119, + 1123, + 682, + 683, + 742, + 840, + 841, + 844, + 1116, + 1117, + 1120, + 1121, + 1201, + 1204, + 1488, + 667, + 672, + 668, + 673, + 669, + 674, + 888, + 904, + 1202, + 1205, + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 773, + 776, + 793, + 794, + 795, + 551, + 1087, + 1269, + 1275, + 1133, + 1135, + 1136, + 1138, + 759, + 1035, + 1038, + 1180, + 1184, 
+ 1266, + 1272, + 691, + 866, + 872, + 1034, + 1036, + 1037, + 1039, + 1451, + 1452, + 758, + 760, + 890, + 893, + 592, + 593, + 823, + 827, + 868, + 870, + 874, + 876, + 905, + 910, + 1178, + 1182, + 1485, + 548 + ] + } + ], + "cmd": "@kpSearch --query \"Who said that Adrian studied at the 'University of Reading'?\"" + }, + { + "searchText": "What did Kevin say about Profiles of the Future?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What did Kevin say about Profiles of the Future?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "say" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Profiles of the Future", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": 
"kevin scott" + } + ] + }, + { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, 
+ { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 
0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What did Kevin say about Profiles of the Future?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 91, + 44, + 101 + ], + "entityMatches": [ + 1464, + 215, + 450, + 500, + 1468, + 1463, + 1033, + 1465, + 1466 + ], + "topicMatches": [ + 1046, + 1473, + 1562 + ], + "actionMatches": [ + 218, + 219, + 453, + 454, + 503, + 504, + 1469, + 1470 + ] + } + ], + "cmd": "@kpSearch --query \"What did Kevin say about Profiles of the Future?\"" + }, + { + "searchText": "What did Adrian say about Profiles of the Future?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What did Adrian say about Profiles of the Future?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "say" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Profiles of the Future", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + 
"text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 
0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + 
"weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What did Adrian say about Profiles of the Future?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 45 + ], + "entityMatches": [ + 220, + 1047, + 1049 + ], + "topicMatches": [ + 1060 + ], + "actionMatches": [ + 223, + 224 + ] + } + ], + "cmd": "@kpSearch --query \"What did Adrian say about Profiles of the Future?\"" + }, + { + "searchText": "Did Kevin talk about 'Profiles of the Future' with Adrian?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Did Kevin talk about 'Profiles of the Future' with Adrian?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "talk" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Profiles of the Future", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "additionalEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", 
+ "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + 
"weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "Did Kevin talk about 'Profiles of the Future' with Adrian?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 91, + 44, + 101 + ], + "entityMatches": [ + 1464, + 215, + 450, + 500, + 1468, + 216, + 451, + 501, + 1463, + 1033, + 1465, + 1466 + ], + "topicMatches": [ + 1046, + 1473, + 1562 + ], + "actionMatches": [ + 218, + 453, + 503, + 219, + 454, + 504, + 1469, + 1470 + ] + } + ], + "cmd": "@kpSearch --query \"Did Kevin talk about 'Profiles of the Future' with Adrian?\"" + }, + { + "searchText": "Did Kevin talk about 'Profiles of the Future' with Christina?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Did Kevin talk about 'Profiles of the Future' with Christina?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "talk" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Profiles of the Future", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "additionalEntities": [ + { + "name": "Christina", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 
0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + }, + { + "term": { + "text": "christina", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "christina warren" + } + ] + } + 
] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "Did Kevin talk about 'Profiles of the Future' with Christina?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 91, + 44, + 101 + ], + "entityMatches": [ + 1464, + 215, + 450, + 500, + 1468, + 217, + 452, + 502, + 1463, + 1033, + 1465, + 1466 + ], + "topicMatches": [ + 1046, + 1473, + 1562 + ], + "actionMatches": [ + 219, + 454, + 504, + 218, + 453, + 503, + 1469, + 1470 + ] + } + ], + "cmd": "@kpSearch --query \"Did Kevin talk about 'Profiles of the Future' with Christina?\"" + }, + { + "searchText": "Did Kevin talk about 'Profiles of the Future' with Umesh?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Did Kevin talk about 'Profiles of the Future' with Umesh?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "talk" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Profiles of the Future", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "additionalEntities": [ + { + "name": "Umesh", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, 
+ { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "writer", + "weight": 0.8636795845477033 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "stuff", + "weight": 0.8536762561617918 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + }, + { + "term": { + "text": "umesh", + "weight": 10 + }, + "relatedTerms": [] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + 
"terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "talk", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "profiles of the future", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "profiles of the future", + "weight": 10 + }, + { + "text": "future predictions", + "weight": 0.8718974381654807 + }, + { + "text": "future landscapes", + "weight": 0.8591752274939053 + }, + { + "text": "future ai", + "weight": 0.8542998451659413 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "Did Kevin talk about 'Profiles of the Future' with Umesh?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 91, + 44, + 101 + ], + "entityMatches": [ + 1464, + 215, + 450, + 500, + 1468, + 1463, + 1033, + 1465, + 1466 + ], + "topicMatches": [ + 1046, + 1473, + 1562 + ], + "actionMatches": [ + 218, + 219, + 453, + 454, + 503, + 504, + 1469, + 1470 + ] + } + ], + "cmd": "@kpSearch --query \"Did Kevin talk about 'Profiles of the Future' with Umesh?\"" + }, + { + "searchText": "Give me an overview of Portids", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "overview of Portids", + "filters": [ + { + "searchTerms": [ + "Portids" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "term": { + "text": "portids", + "weight": 10 + }, + "relatedTerms": [] + } + ] + } + } + ], + "rawQuery": "overview of Portids" + } + ], + "results": [ + { + "messageMatches": [ + 30 + ], + "entityMatches": [ + 851 + ], + "actionMatches": [ + 853, + 854 + ] + } + ], + "cmd": "@kpSearch --query \"Give me an overview of Portids\"" + }, + { + "searchText": "Tell me what Kevin said about 'portids'?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What did Kevin say about 'portids'?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "say" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "portids", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + 
"weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "portids", + "weight": 10 + }, + "relatedTerms": [] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + 
}, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "portids", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "portids", + "weight": 10 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "portids", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "portids", + "weight": 10 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "portids", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "portids", + "weight": 10 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What did Kevin say about 'portids'?" + } + ], + "results": [ + { + "messageMatches": [ + 30 + ], + "entityMatches": [ + 145, + 851 + ], + "actionMatches": [ + 148, + 149, + 853, + 854 + ] + } + ], + "cmd": "@kpSearch --query \"Tell me what Kevin said about 'portids'?\"" + }, + { + "searchText": "Where did Adrian study?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Where did Adrian study?", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "actionVerbs": { + "words": [ + "study" + ], + "tense": "Past" + }, + "isInformational": true + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "study", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "study", + "weight": 10 + }, + { + "text": "research", + "weight": 0.9114409863936512 + }, + { + "text": "studied", + "weight": 0.9103561389052532 + }, + { + "text": "school", + "weight": 0.9079925379332335 + }, + { + "text": "practice", + "weight": 0.9027146067556441 + }, + { + "text": "learn", + "weight": 0.893695599631318 + }, + { + "text": 
"learning", + "weight": 0.8794088946937061 + }, + { + "text": "work", + "weight": 0.8784604581563951 + }, + { + "text": "education", + "weight": 0.8706472259486062 + }, + { + "text": "reading", + "weight": 0.8697260937760158 + }, + { + "text": "graduate", + "weight": 0.8663067052439166 + }, + { + "text": "test", + "weight": 0.8646195754647529 + }, + { + "text": "job", + "weight": 0.862108727508981 + }, + { + "text": "working", + "weight": 0.8616053383143865 + }, + { + "text": "playing", + "weight": 0.8612889776517374 + }, + { + "text": "be studied by", + "weight": 0.8611824320433201 + }, + { + "text": "watch", + "weight": 0.8609701179411657 + }, + { + "text": "play", + "weight": 0.8591912698087716 + }, + { + "text": "performance", + "weight": 0.8571402908487098 + }, + { + "text": "read", + "weight": 0.8568311262637076 + }, + { + "text": "talk", + "weight": 0.8548672662613528 + }, + { + "text": "task", + "weight": 0.8537984164784945 + }, + { + "text": "career", + "weight": 0.8536716575448631 + }, + { + "text": "look", + "weight": 0.8531697466224798 + }, + { + "text": "books", + "weight": 0.8529481424660358 + }, + { + "text": "subject", + "weight": 0.8526252657885129 + }, + { + "text": "profession", + "weight": 0.8524462127925252 + }, + { + "text": "law", + "weight": 0.8517094869029367 + }, + { + "text": "institution", + "weight": 0.8514522647520371 + }, + { + "text": "paper", + "weight": 0.8508476820774012 + }, + { + "text": "attend", + "weight": 0.8507123525408427 + }, + { + "text": "book", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + } + ] + } + } + ], + "rawQuery": "Where did Adrian study?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 12, + 20, + 11, + 86, + 5, + 6, + 88, + 0, + 21, + 104, + 91, + 22, + 16, + 99, + 75, + 102, + 40, + 15, + 10, + 14, + 32, + 87, + 72, + 38, + 46 + ], + "entityMatches": [ + 575, + 602, + 1448, + 1516, + 1547, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1434, + 1580, + 576 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595, + 780, + 779, + 678, + 704, + 730, + 646, + 812, + 1381, + 1390, + 636, + 705, + 745, + 541, + 573, + 763, + 788, + 735 + ], + "actionMatches": [ + 581, + 582, + 583, + 604, + 606, + 1520, + 1522, + 1550, + 1551, + 1554, + 1555, + 1571, + 1575, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 204, + 208, + 213, + 214, + 218, + 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 
344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1440, + 1584, + 1589 + ] + } + ], + "cmd": "@kpSearch --query \"Where did Adrian study?\"" + }, + { + "searchText": "Which university did Adrian Study at?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Which university did Adrian study at?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "study" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "university" + ] + } + ], + "isInformational": true + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "study", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "study", + "weight": 10 + }, + { + "text": "research", + "weight": 0.9114409863936512 + }, + { + "text": "studied", + "weight": 0.9103561389052532 + }, + { + "text": "school", + "weight": 0.9079925379332335 + }, + { + "text": "practice", + "weight": 0.9027146067556441 + }, + { + "text": "learn", + "weight": 0.893695599631318 + }, + { + "text": "learning", + "weight": 0.8794088946937061 + }, + { + "text": "work", + "weight": 0.8784604581563951 + }, + { + "text": "education", + "weight": 0.8706472259486062 + }, + { + "text": "reading", + "weight": 0.8697260937760158 + }, + { + "text": "graduate", + "weight": 0.8663067052439166 + }, + { + "text": "test", + "weight": 0.8646195754647529 + }, + { + "text": "job", + "weight": 
0.862108727508981 + }, + { + "text": "working", + "weight": 0.8616053383143865 + }, + { + "text": "playing", + "weight": 0.8612889776517374 + }, + { + "text": "be studied by", + "weight": 0.8611824320433201 + }, + { + "text": "watch", + "weight": 0.8609701179411657 + }, + { + "text": "play", + "weight": 0.8591912698087716 + }, + { + "text": "performance", + "weight": 0.8571402908487098 + }, + { + "text": "read", + "weight": 0.8568311262637076 + }, + { + "text": "talk", + "weight": 0.8548672662613528 + }, + { + "text": "task", + "weight": 0.8537984164784945 + }, + { + "text": "career", + "weight": 0.8536716575448631 + }, + { + "text": "look", + "weight": 0.8531697466224798 + }, + { + "text": "books", + "weight": 0.8529481424660358 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "profession", + "weight": 0.8524462127925252 + }, + { + "text": "law", + "weight": 0.8517094869029367 + }, + { + "text": "institution", + "weight": 0.8704681581471554 + }, + { + "text": "paper", + "weight": 0.8508476820774012 + }, + { + "text": "attend", + "weight": 0.8507123525408427 + }, + { + "text": "book", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 
0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "university", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "institution", + "weight": 0.8704681581471554 + }, + { + "text": "university of reading", + "weight": 0.8604918147137677 + } + ] + } + ] + } + } + ], + "rawQuery": "Which university did Adrian study at?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 20, + 12, + 11, + 86, + 6, + 5, + 21, + 88, + 0, + 22, + 104, + 40, + 76, + 16, + 10, + 99, + 75, + 91, + 15, + 26, + 87, + 72, + 38, + 46, + 42 + ], + "entityMatches": [ + 575, + 602, + 1448, + 1516, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1434, + 1580, + 1547, + 1, + 2, + 5, + 7, + 10, + 12, + 15, + 17, + 20, + 22, + 25, + 27, + 30, + 32, + 35, + 37, + 40, + 42, + 45, + 47, + 50, + 52, + 55, + 57, + 61, + 62, + 65, + 67, + 71, + 72, + 75, + 77, + 81, + 82, + 85, + 87, + 91, + 92, + 95, + 97, + 101, + 102, + 105, + 107, + 111, + 112, + 115, + 117, + 121, + 122, + 125, + 127, + 131, + 132, + 135, + 137, + 141, + 142, + 145, + 147, + 151, + 152, + 155, + 157, + 161, + 162, + 165, + 167, + 171, + 172, + 175, + 177, + 181, + 182, + 185, + 187, + 191, + 192, + 195, + 197, + 201, + 202, + 205, + 207, + 211, + 212, + 215, + 217, + 221, + 222, + 225, + 227, + 231, + 232, + 235, + 237, + 241, + 242, + 245, + 247, + 251, + 252, + 255, + 257, + 261, + 262, + 265, + 267, + 271, + 272, + 275, + 277, + 281, + 282, + 285, + 287, + 291, + 292, + 295, + 297, + 301, + 302, + 305, + 307, + 311, + 312, + 315, + 317, + 321, + 322, + 325, + 327, + 331, + 332, + 335, + 337, + 341, + 342, + 345, + 347, + 351, + 352, + 355, + 357, + 361, + 
362, + 365, + 367, + 371, + 372, + 375, + 377, + 381, + 382, + 385, + 387, + 391, + 392, + 395, + 397, + 401, + 402, + 405, + 407, + 411, + 412, + 415, + 417, + 421, + 422, + 425, + 427, + 430, + 432, + 435, + 437, + 440, + 442, + 445, + 447, + 450, + 452, + 455, + 457, + 460, + 462, + 465, + 467, + 470, + 472, + 475, + 477, + 480, + 482, + 485, + 487, + 490, + 492, + 495, + 497, + 500, + 502, + 505, + 507, + 510, + 512, + 515, + 517, + 520, + 522, + 544, + 561, + 566, + 591, + 597, + 629, + 641, + 651, + 663, + 708, + 750, + 770, + 801, + 818, + 887, + 920, + 980, + 981, + 982, + 985, + 1031, + 1032, + 1047, + 1084, + 1132, + 1249, + 1288, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1385, + 1407, + 1435, + 1456, + 1463, + 1478, + 1518, + 701, + 576, + 1146, + 1335, + 1384, + 1029, + 1033, + 1066, + 1545, + 1546, + 886, + 966, + 1216, + 1219, + 1264, + 1346, + 835, + 885, + 1200, + 1465, + 1466, + 681, + 819, + 820, + 833, + 834, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1290, + 1292, + 1315, + 1319, + 1480, + 662, + 1360, + 545, + 563, + 1113, + 1582, + 611, + 1049, + 859, + 751, + 984, + 1005, + 864, + 1464, + 1199, + 1004, + 1174 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595, + 1391, + 1414, + 780, + 897, + 779, + 678, + 704, + 730, + 646, + 812, + 1381, + 1390, + 636, + 705, + 745, + 541, + 573, + 763, + 788, + 735, + 898 + ], + "actionMatches": [ + 1520, + 1522, + 581, + 582, + 583, + 604, + 606, + 1550, + 1551, + 1554, + 1555, + 1571, + 1575, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 204, + 208, + 213, + 214, + 218, + 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 
264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1440, + 1584, + 1589, + 903, + 908, + 929, + 1007, + 1009, + 1010, + 1012, + 702, + 549, + 554, + 772, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1387, + 1388, + 1411, + 1548, + 1552, + 843, + 1227, + 1234, + 1321, + 1323, + 1068, + 1070, + 1294, + 1295, + 889, + 892, + 1096, + 642, + 643, + 644, + 645, + 822, + 824, + 826, + 828, + 1309, + 992, + 993, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 654, + 655, + 656, + 657, + 1119, + 1123, + 682, + 683, + 739, + 740, + 741, + 742, + 743, + 744, + 840, + 841, + 844, + 1116, + 1117, + 1120, + 1121, + 1201, + 1204, + 1488, + 888, + 904, + 1202, + 1205, + 1268, + 1274, + 1087, + 1269, + 1275, + 759, + 758, + 760, + 890, + 893, + 592, + 593, + 823, + 827, + 868, + 870, + 874, + 876, + 905, + 910, + 1178, + 1182, + 1485, + 548 + ] + } + ], + "cmd": "@kpSearch --query \"Which university did Adrian Study at?\"" + }, + { + "searchText": "Which was the name of Adrian's college?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What is the name of Adrian's college?", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "college", + "isNamePronoun": false, + "type": [ + "institution" + ] + } + ], + "isInformational": true + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": 
"or", + "terms": [ + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 
0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "college", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "school", + "weight": 0.921637628579625 + }, + { + "text": "graduate", + "weight": 0.8885004135629991 + }, + { + "text": "university", + "weight": 0.8857309073327063 + }, + { + "text": "career", + "weight": 0.8762457635720412 + }, + { + "text": "profession", + "weight": 0.8714673636548091 + }, + { + "text": "education", + "weight": 0.8701771685545058 + }, + { + "text": "study", + "weight": 0.8649779810780862 + } + ] + }, + { + "term": { + "text": "institution", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "organ", + "weight": 0.8652067700006192 + }, + { + "text": "culture", + "weight": 0.8643547045363144 + }, + { + "text": "systems", + "weight": 0.8631429637243371 + }, + { + "text": "system", + "weight": 0.8618195686893446 + }, + { + "text": "industry", + "weight": 0.8561042913986937 + }, + { + "text": "sites", + "weight": 0.8549982312837364 + }, + { + "text": "service", + "weight": 0.8545190962182285 + } + ] + } + ] + } + } + ], + "rawQuery": "What is the name of Adrian's college?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 12, + 11, + 0, + 6, + 5, + 88, + 86, + 20, + 104, + 102, + 99, + 75, + 40, + 10, + 91, + 32, + 87, + 72, + 38, + 70, + 42, + 68, + 52, + 54, + 46 + ], + "entityMatches": [ + 575, + 602, + 1448, + 1516, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1434, + 1580, + 611, + 661, + 701, + 1547, + 751, + 641, + 1, + 2, + 5, + 7, + 10, + 12, + 15, + 17, + 20, + 22, + 25, + 27, + 30, + 32, + 35, + 37, + 40, + 42, + 45, + 47, + 50, + 52, + 55, + 57, + 61, + 62, + 65, + 67, + 71, + 72, + 75, + 77, + 81, + 82, + 85, + 87, + 91, + 92, + 95, + 97, + 101, + 102, + 105, + 107, + 111, + 112, + 115, + 117, + 121, + 122, + 125, + 127, + 131, + 132, + 135, + 137, + 141, + 142, + 145, + 147, + 151, + 152, + 155, + 157, + 161, + 162, + 165, + 167, + 171, + 172, + 175, + 177, + 181, + 182, + 185, + 187, + 191, + 192, + 195, + 197, + 201, + 202, + 205, + 207, + 211, + 212, + 215, + 217, + 221, + 222, + 225, + 227, + 231, + 232, + 235, + 237, + 241, + 242, + 245, + 247, + 251, + 252, + 255, + 257, + 261, + 262, + 265, + 267, + 271, + 272, + 275, + 277, + 281, + 282, + 285, + 287, + 291, + 292, + 295, + 297, + 301, + 302, + 305, + 307, + 311, + 312, + 315, + 317, + 321, + 322, + 325, + 327, + 331, + 332, + 335, + 337, + 341, + 342, + 345, + 347, + 
351, + 352, + 355, + 357, + 361, + 362, + 365, + 367, + 371, + 372, + 375, + 377, + 381, + 382, + 385, + 387, + 391, + 392, + 395, + 397, + 401, + 402, + 405, + 407, + 411, + 412, + 415, + 417, + 421, + 422, + 425, + 427, + 430, + 432, + 435, + 437, + 440, + 442, + 445, + 447, + 450, + 452, + 455, + 457, + 460, + 462, + 465, + 467, + 470, + 472, + 475, + 477, + 480, + 482, + 485, + 487, + 490, + 492, + 495, + 497, + 500, + 502, + 505, + 507, + 510, + 512, + 515, + 517, + 520, + 522, + 544, + 561, + 566, + 591, + 597, + 629, + 651, + 663, + 708, + 750, + 770, + 801, + 818, + 887, + 920, + 980, + 981, + 982, + 985, + 1031, + 1032, + 1047, + 1084, + 1132, + 1249, + 1288, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1385, + 1407, + 1435, + 1456, + 1463, + 1478, + 1518, + 790, + 576, + 1335, + 662, + 1292, + 1146, + 1384, + 706, + 707, + 966, + 1029, + 1033, + 1066, + 1545, + 1546, + 886, + 630, + 711, + 1216, + 1219, + 1264, + 1346, + 737, + 1419, + 1420, + 1421, + 835, + 885, + 1200, + 1465, + 1466, + 681, + 819, + 820, + 833, + 834, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1290, + 1315, + 1319, + 1480, + 1360, + 968, + 545, + 563, + 1113, + 1582, + 821, + 1030, + 1176, + 1214, + 1215, + 1220, + 1248, + 1301, + 1049, + 859, + 782, + 1131, + 1245, + 984, + 1005, + 864, + 1464, + 1199, + 1004, + 1197, + 1174 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595, + 1391, + 1414, + 897, + 636, + 705, + 745, + 678, + 704, + 730, + 898 + ], + "actionMatches": [ + 1520, + 1522, + 581, + 582, + 583, + 604, + 606, + 1550, + 1551, + 1554, + 1555, + 1571, + 1575, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 204, + 208, + 213, + 214, + 218, 
+ 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1440, + 1584, + 1589, + 903, + 908, + 929, + 1007, + 1009, + 1010, + 1012, + 1294, + 1295, + 1268, + 1274, + 666, + 671, + 549, + 554, + 772, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1387, + 1388, + 1411, + 1548, + 1552, + 843, + 1227, + 1234, + 1321, + 1323, + 703, + 702, + 1068, + 1070, + 889, + 892, + 1096, + 642, + 643, + 644, + 645, + 822, + 824, + 826, + 828, + 1309, + 992, + 993, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 654, + 655, + 656, + 657, + 1119, + 1123, + 714, + 715, + 1203, + 1206, + 1270, + 1276, + 1035, + 1038, + 1180, + 1184, + 1266, + 1272, + 682, + 683, + 739, + 740, + 741, + 742, + 743, + 744, + 840, + 841, + 844, + 1116, + 1117, + 1120, + 1121, + 1201, + 1204, + 1488, + 888, + 904, + 1202, + 1205, + 1133, + 1135, + 1136, + 1138, + 1087, + 1269, + 1275, + 759, + 758, + 760, + 890, + 893, + 592, + 593, + 823, + 827, + 868, + 870, + 874, + 876, + 905, + 910, + 1178, + 1182, + 1485, + 548 + ] + } + ], + "cmd": "@kpSearch --query \"Which was the name of Adrian's college?\"" + }, + { + "searchText": "Who studied at the University of Amsterdam?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who studied at the University of Amsterdam?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "studied" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": 
[ + { + "name": "University of Amsterdam", + "isNamePronoun": false, + "type": [ + "school" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "studied", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "studied", + "weight": 10 + }, + { + "text": "study", + "weight": 0.9103561389052532 + }, + { + "text": "be studied by", + "weight": 0.9056059514319782 + }, + { + "text": "research", + "weight": 0.8649162077240531 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "university of amsterdam", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "university", + "weight": 0.8752718178483946 + }, + { + "text": "university of reading", + "weight": 0.8537206075625514 + } + ] + }, + { + "term": { + "text": "school", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "education", + "weight": 0.9073188976975806 + }, + { + "text": "institution", + "weight": 0.8958646384053154 + }, + { + "text": "job", + "weight": 0.8804345211992621 + }, + { + "text": "graduate", + "weight": 0.8780244195990468 + }, + { + "text": "work", + "weight": 0.8738208296183968 + }, + { + "text": "career", + "weight": 0.8727402462225125 + }, + { + "text": "learning", + "weight": 0.8691472487720716 + }, + { + "text": "working", + "weight": 0.8664186438253275 + }, + { + "text": "profession", + "weight": 0.8654285994731649 + }, + { + "text": "law", + "weight": 0.8606680437605202 + }, + { + "text": "books", + "weight": 0.8600765431827375 + }, + { + "text": "sport", + "weight": 0.8599803023419524 + }, + { + "text": "children", + "weight": 0.8594252432411279 + }, + { + "text": "parents", + "weight": 0.8593138918912956 + }, + { + "text": "kid", + "weight": 0.8592532781443597 + }, + { + "text": "paper", + "weight": 0.8589184781551995 + }, + { + "text": "service", + "weight": 
0.8578897264991692 + }, + { + "text": "child", + "weight": 0.8564181014715567 + }, + { + "text": "book", + "weight": 0.8563492536817514 + }, + { + "text": "practice", + "weight": 0.8558217742345485 + }, + { + "text": "learn", + "weight": 0.8543457864506536 + }, + { + "text": "sites", + "weight": 0.8540818814908036 + }, + { + "text": "system", + "weight": 0.8531128979826174 + }, + { + "text": "game", + "weight": 0.853012910117892 + }, + { + "text": "task", + "weight": 0.8529735499426325 + }, + { + "text": "industry", + "weight": 0.8529388735994328 + }, + { + "text": "company", + "weight": 0.8528428782348347 + }, + { + "text": "technology", + "weight": 0.8526962808748119 + }, + { + "text": "movie", + "weight": 0.8504256546445399 + } + ] + } + ] + } + } + ], + "rawQuery": "Who studied at the University of Amsterdam?" + } + ], + "results": [ + { + "messageMatches": [ + 20, + 21, + 12, + 22, + 16, + 75, + 15, + 17, + 19, + 6, + 91, + 64, + 86, + 14, + 26, + 53, + 25, + 66, + 24, + 5, + 23, + 56, + 81, + 47, + 43 + ], + "entityMatches": [ + 611, + 661, + 701, + 751, + 790, + 712, + 1033, + 630, + 711, + 662, + 737, + 1419, + 1420, + 1421, + 738, + 1021, + 663, + 651, + 665, + 1245, + 533, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 769, + 781, + 791, + 852, + 858, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 1083, + 1145, + 1131, + 821, + 1030, + 1176, + 1214, + 1215, + 1220, + 1248, + 1301, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 782, + 545, + 563, + 1113, + 1582, + 923, + 1028, + 1115, + 1144, + 1289, + 1290, + 1292, + 1315, + 1319, + 1436, + 1437, + 1449, + 1480, + 1482, + 1528, + 1529, + 1530, + 1566, + 708, + 641, + 1463, + 1061, + 1106, + 544, + 561 + ], + "topicMatches": [ + 678, + 704, + 730, + 636, + 705, + 745, + 779, + 735, + 541, + 573, + 763, + 788, + 780, + 685, + 530, + 1040, + 1454 + ], + "actionMatches": [ + 666, + 671, + 703, + 702, + 839, + 739, + 740, + 741, + 743, + 
744, + 667, + 672, + 668, + 673, + 654, + 655, + 656, + 657, + 669, + 674, + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 772, + 773, + 776, + 793, + 794, + 795, + 775, + 551, + 774, + 1133, + 1135, + 1136, + 1138, + 1035, + 1038, + 1180, + 1184, + 1266, + 1268, + 1272, + 1274, + 691, + 866, + 872, + 1034, + 1036, + 1037, + 1039, + 1451, + 1452 + ] + } + ], + "cmd": "@kpSearch --query \"Who studied at the University of Amsterdam?\"" + }, + { + "searchText": "Who studied at the University of Reading?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who studied at the University of Reading?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "studied" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "University of Reading", + "isNamePronoun": false, + "type": [ + "school" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "studied", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "studied", + "weight": 10 + }, + { + "text": "study", + "weight": 0.9103561389052532 + }, + { + "text": "be studied by", + "weight": 0.9056059514319782 + }, + { + "text": "research", + "weight": 0.8649162077240531 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "university of reading", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "university", + "weight": 0.8752718178483946 + } + ] + }, + { + "term": { + "text": "school", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "education", + "weight": 0.9073188976975806 + }, + { + "text": "institution", + "weight": 0.8958646384053154 + }, + { + "text": "job", + "weight": 0.8804345211992621 + }, + { + "text": "graduate", + "weight": 0.8780244195990468 + }, + { + "text": "work", + "weight": 
0.8738208296183968 + }, + { + "text": "career", + "weight": 0.8727402462225125 + }, + { + "text": "learning", + "weight": 0.8691472487720716 + }, + { + "text": "working", + "weight": 0.8664186438253275 + }, + { + "text": "profession", + "weight": 0.8654285994731649 + }, + { + "text": "law", + "weight": 0.8606680437605202 + }, + { + "text": "books", + "weight": 0.8600765431827375 + }, + { + "text": "sport", + "weight": 0.8599803023419524 + }, + { + "text": "children", + "weight": 0.8594252432411279 + }, + { + "text": "parents", + "weight": 0.8593138918912956 + }, + { + "text": "kid", + "weight": 0.8592532781443597 + }, + { + "text": "paper", + "weight": 0.8589184781551995 + }, + { + "text": "service", + "weight": 0.8578897264991692 + }, + { + "text": "child", + "weight": 0.8564181014715567 + }, + { + "text": "book", + "weight": 0.8563492536817514 + }, + { + "text": "practice", + "weight": 0.8558217742345485 + }, + { + "text": "learn", + "weight": 0.8543457864506536 + }, + { + "text": "sites", + "weight": 0.8540818814908036 + }, + { + "text": "system", + "weight": 0.8531128979826174 + }, + { + "text": "game", + "weight": 0.853012910117892 + }, + { + "text": "task", + "weight": 0.8529735499426325 + }, + { + "text": "industry", + "weight": 0.8529388735994328 + }, + { + "text": "company", + "weight": 0.8528428782348347 + }, + { + "text": "technology", + "weight": 0.8526962808748119 + }, + { + "text": "movie", + "weight": 0.8504256546445399 + } + ] + } + ] + } + } + ], + "rawQuery": "Who studied at the University of Reading?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 20, + 21, + 15, + 22, + 12, + 16, + 75, + 26, + 19, + 25, + 14, + 17, + 24, + 23, + 6, + 18, + 64, + 91, + 81, + 28, + 53, + 30, + 5, + 36, + 34 + ], + "entityMatches": [ + 611, + 661, + 701, + 751, + 790, + 712, + 1033, + 630, + 711, + 662, + 737, + 1419, + 1420, + 1421, + 738, + 1021, + 663, + 651, + 665, + 1245, + 533, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 769, + 781, + 791, + 852, + 858, + 882, + 883, + 919, + 1198, + 1199, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 1083, + 1145, + 1131, + 821, + 1030, + 1176, + 1214, + 1215, + 1220, + 1248, + 1301, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 782, + 545, + 563, + 1113, + 1582, + 923, + 1028, + 1115, + 1144, + 1289, + 1290, + 1292, + 1315, + 1319, + 1436, + 1437, + 1449, + 1480, + 1482, + 1528, + 1529, + 1530, + 1566, + 708, + 641, + 1463, + 1061, + 1106, + 544, + 561 + ], + "topicMatches": [ + 678, + 704, + 730, + 636, + 705, + 745, + 779, + 735, + 541, + 573, + 763, + 788, + 780, + 685, + 530, + 1040, + 1454 + ], + "actionMatches": [ + 666, + 671, + 703, + 702, + 839, + 739, + 740, + 741, + 743, + 744, + 667, + 672, + 668, + 673, + 654, + 655, + 656, + 657, + 669, + 674, + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 772, + 773, + 776, + 793, + 794, + 795, + 775, + 551, + 774, + 1133, + 1135, + 1136, + 1138, + 1035, + 1038, + 1180, + 1184, + 1266, + 1268, + 1272, + 1274, + 691, + 866, + 872, + 1034, + 1036, + 1037, + 1039, + 1451, + 1452 + ] + } + ], + "cmd": "@kpSearch --query \"Who studied at the University of Reading?\"" + }, + { + "searchText": "Give me a list of insects, animals, arachnids", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Give me a list of insects, animals, arachnids", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "insect" + ] + }, + { + "name": "*", + "isNamePronoun": false, + "type": [ + 
"animal" + ] + }, + { + "name": "*", + "isNamePronoun": false, + "type": [ + "arachnid" + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "insect", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "insects", + "weight": 10 + }, + { + "text": "animal world, specifically insects", + "weight": 0.8959242481192833 + }, + { + "text": "flies", + "weight": 0.8755147713236383 + }, + { + "text": "spider", + "weight": 0.9279973963266834 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "animal", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "animal", + "weight": 10 + }, + { + "text": "animal behavior", + "weight": 0.8853139395001849 + }, + { + "text": "human", + "weight": 0.8761103581434334 + }, + { + "text": "species", + "weight": 0.8728613602342458 + }, + { + "text": "anthropomorphic animals", + "weight": 0.870640551417944 + }, + { + "text": "alien", + "weight": 0.8643427139969423 + }, + { + "text": "flies", + "weight": 0.8573647386380743 + }, + { + "text": "robot", + "weight": 0.8562761334141211 + }, + { + "text": "artifact", + "weight": 0.855085062113182 + }, + { + "text": "organ", + "weight": 0.8548290946071508 + }, + { + "text": "vehicle", + "weight": 0.854459220485194 + }, + { + "text": "non-human", + "weight": 0.8511867841387388 + }, + { + "text": "zoology", + "weight": 0.8507391772978699 + }, + { + "text": "nature", + "weight": 0.8501104053451894 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "arachnid", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "arachnids", + "weight": 10 + }, + { + "text": "spider", + "weight": 0.9279973963266834 + }, + { + "text": "spiders", + "weight": 0.9153345454237493 + }, + { + 
"text": "arachnophobia", + "weight": 0.9097859118926939 + }, + { + "text": "intelligent spiders", + "weight": 0.8881762403388137 + }, + { + "text": "riding giant insects and spiders", + "weight": 0.8721037003952385 + }, + { + "text": "spider senses", + "weight": 0.867646394972307 + }, + { + "text": "spider society", + "weight": 0.8660745261119206 + }, + { + "text": "limited compared to spiders", + "weight": 0.8620879483897361 + }, + { + "text": "octopus", + "weight": 0.8538313711700599 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "Give me a list of insects, animals, arachnids" + } + ], + "results": [ + { + "messageMatches": [ + 21, + 33, + 35, + 55, + 73, + 83, + 30, + 31, + 36, + 67, + 93, + 41, + 51, + 39, + 40 + ], + "entityMatches": [ + 709, + 710, + 863, + 864, + 899, + 900, + 1129, + 1130, + 1316, + 1317, + 1318, + 1408, + 1409, + 1410, + 851, + 859, + 860, + 921, + 1291, + 1481, + 1005, + 1106, + 968, + 984 + ] + } + ], + "cmd": "@kpSearch --query \"Give me a list of insects, animals, arachnids\"" + }, + { + "searchText": "What was the name of Adrian's first books?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What was the name of Adrian's first books?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "name" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "books", + "isNamePronoun": false, + "type": [ + "book" + ], + "facets": [ + { + "facetName": "first", + "facetValue": "*" + } + ] + } + ], + "isInformational": true + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "name", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "title", + "weight": 0.8860685348257236 + }, + { + "text": 
"mention", + "weight": 0.8743879605154318 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "headline", + "weight": 0.861764885043127 + }, + { + "text": "topic", + "weight": 0.8535452841798499 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "person", + "weight": 10 + }, + { + "text": "comment", + "weight": 0.8514482260893863 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 
0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "books", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "fiction", + "weight": 0.8969337125400213 + }, + { + "text": "movies", + "weight": 0.8816987595431408 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "documents", + "weight": 0.8684920127401161 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "stories", + "weight": 0.8643220643714864 + }, + { + "text": "authors", + "weight": 0.8618168180201544 + }, + { + "text": "articles", + "weight": 0.8617403754293433 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "reading", + "weight": 0.8605444868452928 + }, + { + "text": "education", + "weight": 0.8604913792170874 + }, + { + "text": "school", + "weight": 0.8600765431827375 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "quotes", + "weight": 0.8580924287018311 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "music", + "weight": 0.8564490974813763 + }, + { + "text": "writers", + "weight": 0.856123501850461 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "literature", + "weight": 0.8556451407677977 
+ }, + { + "text": "writing", + "weight": 0.8553746828409448 + }, + { + "text": "study", + "weight": 0.8529481424660358 + }, + { + "text": "research", + "weight": 0.8521577165047225 + }, + { + "text": "science fiction books", + "weight": 0.852083801858706 + }, + { + "text": "tools", + "weight": 0.8520347171350715 + }, + { + "text": "job", + "weight": 0.8599676519780717 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "work", + "weight": 0.8528324064557533 + } + ] + }, + { + "term": { + "text": "first", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "last", + "weight": 0.9248515816290805 + }, + { + "text": "start", + "weight": 0.8522636760615245 + } + ] + } + ] + } + } + ], + "rawQuery": "What was the name of Adrian's first books?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 12, + 5, + 6, + 86, + 75, + 25, + 11, + 88, + 0, + 91, + 15, + 16, + 23, + 99, + 102, + 22, + 26, + 19, + 104, + 40, + 34, + 74, + 14, + 24, + 17 + ], + "entityMatches": [ + 1501, + 883, + 1345, + 1199, + 612, + 575, + 602, + 1448, + 1516, + 1547, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1580, + 738, + 533, + 568, + 569, + 577, + 613, + 614, + 615, + 616, + 640, + 749, + 769, + 781, + 791, + 852, + 858, + 882, + 919, + 1198, + 1213, + 1331, + 1464, + 1468, + 1565, + 1434, + 576, + 1463, + 1033, + 1566, + 1146, + 770, + 1335, + 1384, + 1407, + 653, + 665, + 1333, + 1029, + 1066, + 1545, + 1546, + 886, + 1084, + 751, + 835, + 885, + 1200, + 681, + 819, + 820, + 833, + 834, + 836, + 837, + 884, + 1175, + 1177, + 1289, + 1290, + 1292, + 1315, + 1319, + 1480, + 1081, + 651, + 662, + 1047, + 1360, + 544, + 545, + 561, + 563, + 1113, + 1582, + 801, + 1518, + 1065, + 887, + 1504, + 1049, + 800, + 611, + 661, + 712, + 859, + 1031, + 652, + 1450, + 1484, + 713, + 799, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 984, + 1005, + 1374, + 750, + 864, + 1466, + 1465, + 941, + 1061, + 1004, + 708, + 1174 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595, + 541, + 573, + 763, + 788, + 1391, + 1414, + 1559, + 626, + 897, 
+ 589, + 1577, + 862, + 596, + 898, + 625, + 646, + 812, + 1381, + 678, + 704, + 730, + 960, + 1433, + 1370, + 660, + 1348, + 540, + 594, + 659, + 676, + 736, + 747, + 761, + 786, + 811, + 1380, + 1443, + 685 + ], + "actionMatches": [ + 772, + 1551, + 1555, + 1571, + 1575, + 1520, + 1522, + 740, + 741, + 743, + 744, + 581, + 582, + 583, + 604, + 606, + 1550, + 1554, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 204, + 208, + 213, + 214, + 218, + 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1584, + 1589, + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 773, + 776, + 793, + 794, + 795, + 1440, + 655, + 657, + 1376, + 1377, + 1378, + 1068, + 1070, + 1089, + 1095, + 592, + 593, + 868, + 874, + 757, + 549, + 554, + 774, + 775, + 1149, + 1151, + 1387, + 1388, + 1411, + 1548, + 1552, + 843, + 1227, + 1234, + 1321, + 1323, + 755, + 669, + 674, + 1294, + 1295, + 889, + 892, + 1096, + 1309, + 759, + 992, + 993, + 1007, + 1010, + 1085, + 1086, + 1091, + 1092, + 1088, + 1094, + 654, + 656, + 1119, + 1123, + 1570, + 1574, + 1521, + 1523, + 1067, + 1069, + 890, + 893, + 903, + 908, + 682, + 683, 
+ 739, + 742, + 840, + 841, + 844, + 1116, + 1117, + 1120, + 1121, + 1201, + 1204, + 1488, + 806, + 666, + 671, + 888, + 904, + 1202, + 1205, + 1268, + 1274, + 721, + 729, + 805, + 807, + 1087, + 1269, + 1275, + 691, + 702, + 714, + 715, + 839, + 632, + 1375, + 1489, + 1490, + 758, + 760, + 823, + 827, + 870, + 876, + 905, + 910, + 1178, + 1182, + 1485, + 548 + ] + } + ], + "cmd": "@kpSearch --query \"What was the name of Adrian's first books?\"" + }, + { + "searchText": "What was the name of Adrian's first book?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What was the name of Adrian's first book?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "name" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ], + "facets": [ + { + "facetName": "first", + "facetValue": "*" + } + ] + } + ], + "isInformational": true + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "name", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "title", + "weight": 0.8860685348257236 + }, + { + "text": "mention", + "weight": 0.8743879605154318 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "headline", + "weight": 0.861764885043127 + }, + { + "text": "topic", + "weight": 0.8535452841798499 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "person", + "weight": 10 + }, + { + "text": "comment", + "weight": 0.8514482260893863 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 
10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 
+ } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + }, + { + "term": { + "text": "first", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "last", + "weight": 0.9248515816290805 + }, + { + "text": "start", + "weight": 0.8522636760615245 + } + ] + } + ] + } + } + ], + "rawQuery": "What was the name of 
Adrian's first book?" + } + ], + "results": [ + { + "messageMatches": [ + 12, + 5, + 6, + 86, + 11, + 0, + 75, + 88, + 91, + 25, + 99, + 15, + 23, + 16, + 102, + 104, + 19, + 22, + 34, + 40, + 74, + 26, + 24, + 64, + 17 + ], + "entityMatches": [ + 1199, + 575, + 602, + 1448, + 1516, + 1547, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1580, + 533, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 769, + 781, + 791, + 852, + 858, + 882, + 883, + 919, + 1198, + 1213, + 1331, + 1345, + 1464, + 1468, + 1501, + 1565, + 738, + 1434, + 576, + 1463, + 1033, + 1146, + 770, + 1335, + 1384, + 1407, + 653, + 665, + 1333, + 1029, + 1066, + 1545, + 1546, + 1566, + 886, + 1084, + 751, + 835, + 885, + 1200, + 681, + 819, + 820, + 833, + 834, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1290, + 1292, + 1315, + 1319, + 1480, + 651, + 662, + 1047, + 1360, + 544, + 545, + 561, + 563, + 1113, + 1582, + 887, + 1504, + 1049, + 800, + 712, + 859, + 1031, + 611, + 661, + 1081, + 652, + 1450, + 1484, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 984, + 1005, + 750, + 713, + 799, + 801, + 1518, + 864, + 1465, + 1466, + 1004, + 1061, + 708, + 1174 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595, + 541, + 573, + 763, + 788, + 1391, + 1414, + 1559, + 626, + 
897, + 589, + 862, + 898, + 625, + 646, + 812, + 1381, + 660, + 1348, + 685, + 1577, + 540, + 594, + 659, + 676, + 736, + 747, + 761, + 786, + 811, + 1380, + 1443, + 596 + ], + "actionMatches": [ + 772, + 1551, + 1555, + 1520, + 1522, + 1571, + 1575, + 581, + 582, + 583, + 604, + 606, + 1550, + 1554, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 204, + 208, + 213, + 214, + 218, + 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1584, + 1589, + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 773, + 776, + 793, + 794, + 795, + 740, + 741, + 743, + 744, + 1440, + 655, + 657, + 1089, + 1095, + 592, + 593, + 757, + 549, + 554, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1387, + 1388, + 1411, + 1548, + 1552, + 843, + 1227, + 1234, + 1321, + 1323, + 755, + 669, + 674, + 1068, + 1070, + 1294, + 1295, + 889, + 892, + 1096, + 1309, + 759, + 992, + 993, + 1007, + 1010, + 1085, + 1086, + 1091, + 1092, + 654, + 656, + 1119, + 1123, + 890, + 893, + 903, + 908, + 682, + 683, + 739, + 742, + 840, + 841, + 844, + 1116, + 1117, + 1120, + 1121, + 1201, + 1204, + 1488, + 806, + 888, + 904, + 
1202, + 1205, + 1268, + 1274, + 666, + 671, + 1088, + 1094, + 1087, + 1269, + 1275, + 691, + 839, + 632, + 1570, + 1574, + 758, + 760, + 721, + 729, + 805, + 807, + 1521, + 1523, + 823, + 827, + 868, + 870, + 874, + 876, + 905, + 910, + 1178, + 1182, + 1485, + 702, + 714, + 715, + 548 + ] + } + ], + "cmd": "@kpSearch --query \"What was the name of Adrian's first book?\"" + }, + { + "searchText": "Who mentioned the University of Reading?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who mentioned the University of Reading?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "mention" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "University of Reading", + "isNamePronoun": false, + "type": [ + "university" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "mention", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "university of reading", + "weight": 10 + }, + "relatedTerms": [] + }, + { + "term": { + "text": "university", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "school", + "weight": 0.8752718178483946 + }, + { + "text": "institution", + "weight": 0.8704681581471554 + } + ] + } + ] + } + } + ], + "rawQuery": "Who mentioned the University of Reading?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 12, + 20, + 17, + 23, + 26 + ], + "entityMatches": [ + 611, + 701, + 661, + 751, + 790 + ], + "actionMatches": [ + 702, + 666, + 671 + ] + } + ], + "cmd": "@kpSearch --query \"Who mentioned the University of Reading?\"" + }, + { + "searchText": "What did Adrian say about Skynet?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What did Adrian say about Skynet?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "say" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Skynet", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + 
"text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 100 + }, + 
"relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 
0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What did Adrian say about Skynet?" + } + ], + "results": [ + { + "messageMatches": [ + 5, + 41, + 51, + 59, + 63, + 67, + 69, + 71, + 73 + ], + "entityMatches": [ + 21, + 200, + 250, + 290, + 310, + 330, + 340, + 350, + 360, + 1290, + 1301, + 1307, + 567, + 1106, + 1004, + 1220, + 1219, + 1163, + 1315, + 1107 + ], + "topicMatches": [ + 1297, + 1304, + 1244, + 1241, + 1167, + 1324, + 1329, + 572 + ], + "actionMatches": [ + 23, + 203, + 204, + 253, + 254, + 293, + 294, + 313, + 314, + 333, + 334, + 343, + 344, + 353, + 354, + 363, + 364, + 570, + 571, + 1293, + 1302, + 1303, + 1308, + 1309, + 1310, + 1311, + 1108, + 1109, + 1007, + 1010, + 1227, + 1234, + 1226, + 1233 + ] + } + ], + "cmd": "@kpSearch --query \"What did Adrian say about Skynet?\"" + }, + { + "searchText": "Adrian talked about Skynet. What did he say?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Adrian talked about Skynet. 
What did he say?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "talked" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Skynet", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "talked", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "talked", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": 
"or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 
0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "Adrian talked about Skynet. What did he say?" + } + ], + "results": [ + { + "messageMatches": [ + 5, + 41, + 51, + 59, + 63, + 67, + 69, + 71, + 73 + ], + "entityMatches": [ + 21, + 200, + 250, + 290, + 310, + 330, + 340, + 350, + 360, + 1290, + 1301, + 1307, + 567, + 1106, + 1004, + 1220, + 1219, + 1163, + 1315, + 1107 + ], + "topicMatches": [ + 1297, + 1304, + 1244, + 1241, + 1167, + 1324, + 1329, + 572 + ], + "actionMatches": [ + 23, + 203, + 204, + 253, + 254, + 293, + 294, + 313, + 314, + 333, + 334, + 343, + 344, + 353, + 354, + 363, + 364, + 570, + 571, + 1293, + 1302, + 1303, + 1308, + 1309, + 1310, + 1311, + 1108, + 1109, + 1007, + 1010, + 1227, + 1234, + 1226, + 1233 + ] + } + ], + "cmd": "@kpSearch --query \"Adrian talked about Skynet. 
What did he say?\"" + }, + { + "searchText": "What was Adrian's sentiment when he talked about Skynet?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What was Adrian's sentiment when he talked about Skynet?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "talked" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Skynet", + "isNamePronoun": false + } + ], + "isInformational": false + }, + "searchTerms": [ + "sentiment" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "talked", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ] + }, + { + "term": { + "text": "sentiment", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "sentience", + "weight": 0.8988160283145288 + }, + { + "text": "emotion", + "weight": 0.884048528838712 + }, + { + "text": "sentient", + "weight": 
0.8762578433551642 + }, + { + "text": "empathy", + "weight": 0.8731721276032847 + }, + { + "text": "sense", + "weight": 0.8681766690548425 + }, + { + "text": "sympathy", + "weight": 0.8662539358006802 + }, + { + "text": "emotional state", + "weight": 0.8593118877033115 + }, + { + "text": "senses", + "weight": 0.8591064604062022 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "talked", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 
0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What was Adrian's sentiment when he talked about Skynet?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 5, + 41, + 51, + 59, + 63, + 67, + 69, + 71, + 73 + ], + "entityMatches": [ + 21, + 200, + 250, + 290, + 310, + 330, + 340, + 350, + 360, + 1290, + 1301, + 1307, + 567, + 1106, + 1004, + 1220, + 1219, + 1163, + 1315, + 1107 + ], + "topicMatches": [ + 1297, + 1304, + 1236, + 1244, + 1241, + 1167, + 1324, + 1329, + 572 + ], + "actionMatches": [ + 23, + 203, + 204, + 253, + 254, + 293, + 294, + 313, + 314, + 333, + 334, + 343, + 344, + 353, + 354, + 363, + 364, + 570, + 571, + 1293, + 1302, + 1303, + 1308, + 1309, + 1310, + 1311, + 1227, + 1234, + 1108, + 1109, + 1007, + 1010, + 1226, + 1233 + ] + } + ], + "cmd": "@kpSearch --query \"What was Adrian's sentiment when he talked about Skynet?\"" + }, + { + "searchText": "What books did Adrian write?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What books did Adrian write?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "write" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "write", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "write", + "weight": 10 + }, + { + "text": "writing", + "weight": 0.916671758607272 + }, + { + "text": "writer", + "weight": 0.8859531328129586 + }, + { + "text": "writers", + "weight": 0.8830138916746346 + }, + { + "text": "wrote", + "weight": 0.8824147724645259 + }, + { + "text": "rewrite", + "weight": 0.8813278726082592 + }, + { + "text": "read", + "weight": 0.8801699481129603 + }, + { + "text": "be written", + "weight": 0.8729017448256544 + }, + { + 
"text": "send", + "weight": 0.8600174607250264 + }, + { + "text": "publish", + "weight": 0.8591007471566714 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "talk", + "weight": 0.8534960957714863 + }, + { + "text": "go off and write", + "weight": 0.8527989697679055 + }, + { + "text": "being written", + "weight": 0.8520617011719704 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { 
+ "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 
0.8539545088748834 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "write", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "write", + "weight": 10 + }, + { + "text": "writing", + "weight": 0.916671758607272 + }, + { + "text": "writer", + "weight": 0.8859531328129586 + }, + { + "text": "writers", + "weight": 0.8830138916746346 + }, + { + "text": "wrote", + "weight": 0.8824147724645259 + }, + { + "text": "rewrite", + "weight": 0.8813278726082592 + }, + { + "text": "read", + "weight": 0.8801699481129603 + }, + { + "text": "be written", + "weight": 0.8729017448256544 + }, + { + "text": "send", + "weight": 0.8600174607250264 + }, + { + "text": "publish", + "weight": 0.8591007471566714 + }, + { + "text": "paper", + "weight": 0.8539933641646713 + }, + { + "text": "reading", + "weight": 0.8539764684441886 + }, + { + "text": "talk", + "weight": 0.8534960957714863 + }, + { + "text": "go off and write", + "weight": 0.8527989697679055 + }, + { + "text": "being written", + "weight": 0.8520617011719704 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + } + ], + "rawQuery": "What books did Adrian write?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 96, + 19, + 41, + 45, + 47, + 6, + 1, + 5, + 23, + 25, + 27, + 99, + 15 + ], + "entityMatches": [ + 575, + 1516, + 0, + 21, + 26, + 70, + 90, + 110, + 120, + 130, + 200, + 220, + 230, + 476, + 491, + 567, + 1547, + 801, + 1518, + 1, + 2, + 20, + 22, + 25, + 27, + 71, + 72, + 91, + 92, + 111, + 112, + 121, + 122, + 131, + 132, + 201, + 202, + 221, + 222, + 231, + 232, + 475, + 477, + 490, + 492, + 641, + 750, + 1047, + 533, + 568, + 569, + 577, + 640, + 749, + 781, + 576, + 1066, + 1545, + 1546, + 751, + 1049, + 687, + 1005, + 1004, + 1061 + ], + "topicMatches": [ + 585, + 541, + 573, + 763, + 788, + 540, + 761, + 786, + 811, + 1559, + 589, + 646, + 812 + ], + "actionMatches": [ + 1551, + 1555, + 1520, + 1522, + 581, + 582, + 583, + 1550, + 1554, + 3, + 4, + 23, + 28, + 73, + 74, + 93, + 94, + 113, + 114, + 123, + 124, + 133, + 134, + 203, + 204, + 223, + 224, + 233, + 234, + 478, + 493, + 570, + 571, + 1007, + 1009, + 1010, + 1012, + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 1548, + 1552, + 755, + 1068, + 1070, + 642, + 643, + 644, + 645, + 759, + 691, + 758, + 760, + 1521, + 1523 + ] + } + ], + "cmd": "@kpSearch --query \"What books did Adrian write?\"" + }, + { + "searchText": "What books did Kevin write?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What books did Kevin write?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "write" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "write", + "weight": 10 + }, + 
"relatedTerms": [ + { + "text": "write", + "weight": 10 + }, + { + "text": "writing", + "weight": 0.916671758607272 + }, + { + "text": "writer", + "weight": 0.8859531328129586 + }, + { + "text": "writers", + "weight": 0.8830138916746346 + }, + { + "text": "wrote", + "weight": 0.8824147724645259 + }, + { + "text": "rewrite", + "weight": 0.8813278726082592 + }, + { + "text": "read", + "weight": 0.8801699481129603 + }, + { + "text": "be written", + "weight": 0.8729017448256544 + }, + { + "text": "send", + "weight": 0.8600174607250264 + }, + { + "text": "publish", + "weight": 0.8591007471566714 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "talk", + "weight": 0.8534960957714863 + }, + { + "text": "go off and write", + "weight": 0.8527989697679055 + }, + { + "text": "being written", + "weight": 0.8520617011719704 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 
+ }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { 
+ "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "write", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "write", + "weight": 10 + }, + { + "text": "writing", + "weight": 0.916671758607272 + }, + { + "text": "writer", + "weight": 0.8859531328129586 + }, + { + "text": "writers", + "weight": 0.8830138916746346 + }, + { + "text": "wrote", + "weight": 0.8824147724645259 + }, + { + "text": "rewrite", + "weight": 0.8813278726082592 + }, + { + "text": "read", + "weight": 0.8801699481129603 + }, + { + "text": "be written", + "weight": 0.8729017448256544 + }, + { + "text": "send", + "weight": 0.8600174607250264 + }, + { + "text": "publish", + "weight": 0.8591007471566714 + }, + { + "text": "paper", + "weight": 0.8539933641646713 + }, + { + "text": "reading", + "weight": 0.8539764684441886 + }, + { + "text": "talk", + "weight": 0.8534960957714863 + }, + { + "text": "go off and write", + "weight": 0.8527989697679055 + }, + { + 
"text": "being written", + "weight": 0.8520617011719704 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + } + ], + "rawQuery": "What books did Kevin write?" + } + ], + "results": [ + { + "messageMatches": [ + 2, + 24, + 48, + 62, + 66, + 91, + 95, + 16, + 22, + 78, + 99, + 6 + ], + "entityMatches": [ + 5, + 25, + 75, + 105, + 115, + 235, + 305, + 325, + 385, + 450, + 470, + 490, + 544, + 1463, + 1464, + 1199, + 6, + 7, + 26, + 27, + 76, + 77, + 106, + 107, + 116, + 117, + 236, + 237, + 306, + 307, + 326, + 327, + 386, + 387, + 451, + 452, + 471, + 472, + 491, + 492, + 575, + 651, + 770, + 1084, + 577, + 769, + 1198, + 1468, + 738, + 653, + 1545, + 1546, + 1264, + 1547, + 1200, + 545, + 1504, + 1081, + 652, + 1262, + 1465, + 1466, + 576 + ], + "topicMatches": [ + 659, + 747, + 1380, + 1559, + 589, + 1381, + 660 + ], + "actionMatches": [ + 772, + 8, + 9, + 28, + 29, + 78, + 79, + 108, + 109, + 118, + 119, + 238, + 239, + 308, + 309, + 328, + 329, + 388, + 389, + 453, + 454, + 473, + 474, + 493, + 494, + 550, + 553, + 555, + 773, + 776, + 549, + 554, + 740, + 741, + 743, + 744, + 548, + 655, + 657, + 1089, + 1095, + 774, + 775, + 1376, + 1377, + 1378, + 1548, + 1552, + 1551, + 1555, + 1096, + 1085, + 1086, + 1091, + 1092, + 654, + 656, + 739, + 742, + 1201, + 1204, + 1202, + 1205, + 1268, + 1274, + 1088, + 1094, + 1087, + 1269, + 1275 + ] + } + ], + "cmd": "@kpSearch --query \"What books did Kevin write?\"" + }, + { + "searchText": "Summarize Kevin's thoughts on Artificial Intelligence?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Summarize Kevin's thoughts on Artificial Intelligence", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "isInformational": false + }, + "searchTerms": [ + "Artificial Intelligence" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + 
"booleanOp": "or", + "terms": [ + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + 
"weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "artificial intelligence", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "human-seeming intelligence", + "weight": 0.8864734409947517 + }, + { + "text": "artificial", + "weight": 0.880934040286398 + }, + { + "text": "strong ai", + "weight": 0.8793549419006241 + }, + { + "text": "generative ai", + "weight": 0.8768135499015218 + }, + { + "text": "generative ai and creativity", + "weight": 0.8763137497687796 + }, + { + "text": "emergent ai", + "weight": 0.8727685866397861 + }, + { + "text": "topic: superhuman ai", + "weight": 0.8724873582946049 + }, + { + "text": "science fiction ai", + "weight": 0.8721353651769909 + }, + { + "text": "ai super intelligence", + "weight": 0.8712886210805108 + }, + { + "text": "artificially generated", + "weight": 0.8700178090671777 + }, + { + "text": "chatbots", + "weight": 0.8685584253229648 + }, + { + "text": "robot behavior", + "weight": 0.8682360373003561 + }, + { + "text": "superhuman ai", + "weight": 0.8672326833864481 + }, + { + "text": "intelligent life", + "weight": 0.8665644533589565 + }, + { + "text": "chatbot", + "weight": 0.8657365939584234 + }, + { + "text": "generative ai systems", + "weight": 0.8626742573760334 + }, + { + "text": "ai and human interaction", + "weight": 0.862162478298934 + }, + { + "text": "intelligence", + "weight": 0.8613737945944341 + }, + { + "text": "written by ai", + "weight": 0.8584006982933475 + }, + { + "text": "artificial events", + "weight": 0.8573337764997134 + }, + { + "text": "self-awareness in chatbots", + "weight": 0.8543346080058085 + }, + { + "text": "skynet", + "weight": 0.8542080117789144 + }, + { + "text": "robot butlers", + "weight": 0.8502714496515634 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + 
"terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + } + ], + "rawQuery": "Summarize Kevin's thoughts on Artificial Intelligence" + } + ], + "results": [ + { + "messageMatches": [ + 91, + 60, + 40, + 66, + 97, + 93, + 36, + 28, + 48, + 99, + 74, + 2, + 62, + 44, + 4, + 84, + 76, + 95, + 12, + 87, + 72, + 38, + 70, + 68, + 46 + ], + "entityMatches": [ + 5, + 15, + 25, + 35, + 45, + 55, + 65, + 75, + 85, + 95, + 105, + 115, + 125, + 135, + 145, + 155, + 165, + 175, + 185, + 195, + 205, + 215, + 225, + 235, + 245, + 255, + 265, + 275, + 285, + 295, + 305, + 315, + 325, + 335, + 345, + 355, + 365, + 375, + 385, + 395, + 405, + 415, + 430, + 440, + 450, + 460, + 470, + 480, + 490, + 500, + 510, + 520, + 544, + 566, + 982, + 6, + 7, + 16, + 17, + 26, + 27, + 36, + 37, + 46, + 47, + 56, + 57, + 66, + 67, + 76, + 77, + 86, + 87, + 96, + 97, + 106, + 107, + 116, + 117, + 126, + 127, + 136, + 137, + 146, + 147, + 156, + 157, + 166, + 167, + 176, + 177, + 186, + 187, + 196, + 197, + 206, + 207, + 216, + 217, + 226, + 227, + 236, + 237, + 246, + 247, + 256, + 257, + 266, + 267, + 276, + 277, + 286, + 287, + 296, + 297, + 306, + 307, + 316, + 317, + 326, + 327, + 336, + 337, + 346, + 347, + 356, + 357, + 366, + 367, + 376, + 377, + 386, + 387, + 396, + 397, + 406, + 407, + 416, + 417, + 431, + 432, + 441, + 442, + 451, + 452, + 461, + 462, + 471, + 472, + 481, + 482, + 491, + 492, + 501, + 502, + 511, + 512, + 521, + 522, + 575, + 597, + 610, + 629, + 651, + 770, + 818, + 887, + 920, + 980, + 981, + 983, + 985, + 1031, + 1032, + 1084, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1456, + 1463, + 1478, + 1146, + 1335, + 1029, + 1033, + 1545, + 1546, + 886, + 821, + 1264, + 1547, + 1334, + 885, + 1200, + 1465, + 1466, + 681, + 819, + 820, + 884, + 1175, + 1177, + 1480, + 1245, + 545, + 923, + 984, + 576, + 
1464, + 1199, + 921, + 1174 + ], + "topicMatches": [ + 934, + 897, + 832, + 1339, + 937, + 1001, + 898 + ], + "actionMatches": [ + 929, + 8, + 9, + 18, + 19, + 28, + 29, + 38, + 39, + 48, + 49, + 58, + 59, + 68, + 69, + 78, + 79, + 88, + 89, + 98, + 99, + 108, + 109, + 118, + 119, + 128, + 129, + 138, + 139, + 148, + 149, + 158, + 159, + 168, + 169, + 178, + 179, + 188, + 189, + 198, + 199, + 208, + 209, + 218, + 219, + 228, + 229, + 238, + 239, + 248, + 249, + 258, + 259, + 268, + 269, + 278, + 279, + 288, + 289, + 298, + 299, + 308, + 309, + 318, + 319, + 328, + 329, + 338, + 339, + 348, + 349, + 358, + 359, + 368, + 369, + 378, + 379, + 388, + 389, + 398, + 399, + 408, + 409, + 418, + 419, + 433, + 434, + 443, + 444, + 453, + 454, + 463, + 464, + 473, + 474, + 483, + 484, + 493, + 494, + 503, + 504, + 513, + 514, + 523, + 524, + 550, + 553, + 555, + 549, + 554, + 548, + 772, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1548, + 1552, + 925, + 889, + 892, + 1096, + 825, + 829, + 822, + 824, + 826, + 828, + 1338, + 1336, + 1337, + 992, + 993, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 654, + 655, + 656, + 657, + 682, + 683, + 739, + 740, + 741, + 742, + 743, + 744, + 1201, + 1204, + 1488, + 888, + 1202, + 1205, + 1268, + 1274, + 1087, + 1269, + 1275, + 890, + 893, + 823, + 827, + 1178, + 1182, + 1485 + ] + } + ], + "cmd": "@kpSearch --query \"Summarize Kevin's thoughts on Artificial Intelligence?\"" + }, + { + "searchText": "Summarize Kevin's thoughts on AI?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Summarize Kevin's thoughts on AI", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "AI", + "isNamePronoun": false + } + ], + "isInformational": false + }, + "searchTerms": [ + "thoughts", + "summarize" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + 
"booleanOp": "or", + "terms": [ + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "ai", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "ais", + "weight": 0.9110748632772587 + }, + { + "text": "ai use", + "weight": 0.9048274017733158 + }, + { + "text": "strong ai", + "weight": 0.8818501002968224 + }, + { + "text": "written by ai", + "weight": 0.8717330209653917 + }, + { + "text": "ai capabilities", + "weight": 0.8716588615106122 + }, + { + "text": "ai programs", + "weight": 0.8712718352030914 + }, + { + "text": "ai and human interaction", + "weight": 0.8707026797994745 + }, + { + "text": "ai development", + "weight": 0.8706364221890454 + }, + { + "text": "ai style program", + "weight": 0.8646452861789973 + }, + { + "text": "ai behavior", + "weight": 0.861913249586693 + }, + { + "text": "i", + "weight": 0.8614415406993372 + }, + { + "text": "ovid", + "weight": 0.861029024626728 + }, + { + "text": "ai super intelligence", + "weight": 0.8604107695793612 + }, + { + "text": "ability", + "weight": 0.859383377041975 + }, + { + "text": "agency", + "weight": 0.8591398648969518 + }, + { + "text": "agent", + "weight": 0.8565706686676895 + }, + { + "text": "intelligence", + "weight": 0.8554817260506392 + }, + { + "text": "impact", + "weight": 0.8516432013758837 + }, + { + "text": "idea", + "weight": 0.8507109971501728 + }, + { + "text": "issue", + "weight": 0.8503399455478627 + } + ] + }, + { + "term": { + "text": "thoughts", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ] + }, + { + "term": { + "text": "summarize", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "understand", + "weight": 0.8551821527870132 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + 
], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "ai", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "ai", + "weight": 10 + }, + { + "text": "ais", + "weight": 0.9110748632772587 + }, + { + "text": "ai use", + "weight": 0.9048274017733158 + }, + { + "text": "strong ai", + "weight": 0.8818501002968224 + }, + { + "text": "written by ai", + "weight": 0.8717330209653917 + }, + { + "text": "ai capabilities", + "weight": 0.8716588615106122 + }, + { + "text": "ai programs", + "weight": 0.8712718352030914 + }, + { + "text": "ai and human interaction", + "weight": 0.8707026797994745 + }, + { + "text": "ai development", + "weight": 0.8706364221890454 + }, + { + "text": "ai style program", + "weight": 0.8646452861789973 + }, + { + "text": "ai behavior", + "weight": 0.861913249586693 + }, + { + "text": "i", + "weight": 0.8614415406993372 + }, + { + "text": "ovid", + "weight": 0.861029024626728 + }, + { + "text": "ai super intelligence", + "weight": 0.8604107695793612 + }, + { + "text": "ability", + "weight": 0.859383377041975 + }, + { + "text": "agency", + "weight": 0.8591398648969518 + }, + { + "text": "agent", + "weight": 0.8565706686676895 + }, + { + "text": "intelligence", + "weight": 0.8554817260506392 + }, + { + "text": "impact", + "weight": 0.8516432013758837 + }, + { + "text": "idea", + "weight": 0.8507109971501728 + }, + { + "text": "issue", + "weight": 0.8503399455478627 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "ai", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "ai", + "weight": 10 + }, + { + "text": "ais", + "weight": 0.9110748632772587 + }, + { + "text": "ai use", + "weight": 0.9048274017733158 + }, + { + "text": "strong ai", + "weight": 0.8818501002968224 + }, + { + "text": "written by ai", + "weight": 0.8717330209653917 + }, + { + "text": "ai capabilities", + 
"weight": 0.8716588615106122 + }, + { + "text": "ai programs", + "weight": 0.8712718352030914 + }, + { + "text": "ai and human interaction", + "weight": 0.8707026797994745 + }, + { + "text": "ai development", + "weight": 0.8706364221890454 + }, + { + "text": "ai style program", + "weight": 0.8646452861789973 + }, + { + "text": "ai behavior", + "weight": 0.861913249586693 + }, + { + "text": "i", + "weight": 0.8614415406993372 + }, + { + "text": "ovid", + "weight": 0.861029024626728 + }, + { + "text": "ai super intelligence", + "weight": 0.8604107695793612 + }, + { + "text": "ability", + "weight": 0.859383377041975 + }, + { + "text": "agency", + "weight": 0.8591398648969518 + }, + { + "text": "agent", + "weight": 0.8565706686676895 + }, + { + "text": "intelligence", + "weight": 0.8554817260506392 + }, + { + "text": "impact", + "weight": 0.8516432013758837 + }, + { + "text": "idea", + "weight": 0.8507109971501728 + }, + { + "text": "issue", + "weight": 0.8503399455478627 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "ai", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "ai", + "weight": 10 + }, + { + "text": "ais", + "weight": 0.9110748632772587 + }, + { + "text": "ai use", + "weight": 0.9048274017733158 + }, + { + "text": "strong ai", + "weight": 0.8818501002968224 + }, + { + "text": "written by ai", + "weight": 0.8717330209653917 + }, + { + "text": "ai capabilities", + "weight": 0.8716588615106122 + }, + { + "text": "ai programs", + "weight": 0.8712718352030914 + }, + { + "text": "ai and human interaction", + "weight": 0.8707026797994745 + }, + { + "text": "ai development", + "weight": 0.8706364221890454 + }, + { + "text": "ai style program", + "weight": 0.8646452861789973 + }, + { + "text": "ai behavior", + "weight": 0.861913249586693 + }, + { + "text": "i", + "weight": 0.8614415406993372 + }, + { + "text": "ovid", + "weight": 0.861029024626728 + }, + { + "text": "ai super 
intelligence", + "weight": 0.8604107695793612 + }, + { + "text": "ability", + "weight": 0.859383377041975 + }, + { + "text": "agency", + "weight": 0.8591398648969518 + }, + { + "text": "agent", + "weight": 0.8565706686676895 + }, + { + "text": "intelligence", + "weight": 0.8554817260506392 + }, + { + "text": "impact", + "weight": 0.8516432013758837 + }, + { + "text": "idea", + "weight": 0.8507109971501728 + }, + { + "text": "issue", + "weight": 0.8503399455478627 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "Summarize Kevin's thoughts on AI" + } + ], + "results": [ + { + "messageMatches": [ + 40, + 60, + 62, + 66, + 74, + 91, + 93, + 97, + 99 + ], + "entityMatches": [ + 195, + 295, + 305, + 325, + 365, + 450, + 460, + 480, + 490, + 982, + 1174, + 1197, + 1334, + 1465, + 1466, + 1480, + 1482, + 1528, + 1546, + 1175, + 1177 + ], + "topicMatches": [ + 1472, + 1539, + 1558, + 1186, + 1287, + 1001, + 1191 + ], + "actionMatches": [ + 198, + 199, + 298, + 299, + 308, + 309, + 328, + 329, + 368, + 369, + 453, + 454, + 463, + 464, + 483, + 484, + 493, + 494, + 1178, + 1182, + 1531, + 1533, + 1534, + 1535, + 1537, + 1538, + 1485, + 1549, + 1550, + 1553, + 1554, + 1336, + 1337, + 1269, + 1275 + ] + } + ], + "cmd": "@kpSearch --query \"Summarize Kevin's thoughts on AI?\"" + }, + { + "searchText": "Summarize Adrian's thoughts to Kevin?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Summarize Adrian's thoughts to Kevin", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Kevin", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + 
{ + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "Summarize Adrian's thoughts to Kevin" + } + ], + "results": [ + { + "messageMatches": [ + 86, + 12, + 104, + 99, + 102, + 5, + 6, + 39, + 96, + 63, + 27, + 23, + 67, + 73, + 21, + 65, + 35, + 59, + 13, + 25, + 85, + 37, + 19, + 49, + 1 + ], + "entityMatches": [ + 575, + 1516, + 1547, + 1567, + 0, + 21, + 26, + 56, + 60, + 70, + 80, + 90, + 100, + 110, + 120, + 130, + 140, + 150, + 160, + 170, + 180, + 190, + 200, + 210, + 220, + 230, + 240, + 250, + 260, + 270, + 280, + 290, + 300, + 310, + 320, + 330, + 340, + 350, + 360, + 370, + 380, + 390, + 400, + 410, + 420, + 426, + 476, + 491, + 506, + 516, + 567, + 610, + 1434, + 1580, + 1435, + 1, + 22, + 25, + 55, + 61, + 71, + 81, + 91, + 101, + 111, + 121, + 131, + 141, + 151, + 161, + 171, + 181, + 191, + 201, + 211, + 221, + 231, + 241, + 251, + 261, + 271, + 281, + 291, + 301, + 311, + 321, + 331, + 341, + 351, + 361, + 371, + 381, + 391, + 401, + 
411, + 421, + 427, + 477, + 490, + 507, + 517, + 576 + ], + "topicMatches": [ + 585, + 1442, + 1595 + ], + "actionMatches": [ + 1440, + 3, + 28, + 58, + 63, + 73, + 83, + 93, + 103, + 113, + 123, + 133, + 143, + 153, + 163, + 173, + 183, + 193, + 203, + 213, + 223, + 233, + 243, + 253, + 263, + 273, + 283, + 293, + 303, + 313, + 323, + 333, + 343, + 353, + 363, + 373, + 383, + 393, + 403, + 413, + 423, + 493, + 581, + 582, + 583, + 1520, + 1522, + 1550, + 1551, + 1554, + 1555, + 1571, + 1575, + 4, + 23, + 64, + 74, + 84, + 94, + 104, + 114, + 124, + 134, + 144, + 154, + 164, + 174, + 184, + 194, + 204, + 214, + 224, + 234, + 244, + 254, + 264, + 274, + 284, + 294, + 304, + 314, + 324, + 334, + 344, + 354, + 364, + 374, + 384, + 394, + 404, + 414, + 424, + 428, + 478, + 508, + 518, + 570, + 571, + 618, + 619, + 1584, + 1589, + 24, + 29, + 59, + 429, + 479, + 494, + 509, + 519 + ] + } + ], + "cmd": "@kpSearch --query \"Summarize Adrian's thoughts to Kevin?\"" + }, + { + "searchText": "Summarize Adrian's ideas about the Terminator?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Summarize Adrian's ideas about the Terminator", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Terminator", + "isNamePronoun": false, + "type": [ + "movie" + ] + } + ], + "isInformational": false + }, + "searchTerms": [ + "ideas" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "terminator", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 0.8612655359336829 + } + ] + }, + { + "term": { + "text": "movie", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "movies", + "weight": 10 + }, + { + 
"text": "episode", + "weight": 0.8963339782245872 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "actors", + "weight": 0.8860106632957048 + }, + { + "text": "book", + "weight": 0.8820902843985773 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "watch", + "weight": 0.8726356920361626 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "picture", + "weight": 0.869302853880071 + }, + { + "text": "performance", + "weight": 0.869120731073309 + }, + { + "text": "books", + "weight": 0.8663417625018435 + }, + { + "text": "music", + "weight": 0.8656983424097792 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": "genre", + "weight": 0.8617874159731083 + }, + { + "text": "show", + "weight": 0.8614962890272202 + }, + { + "text": "topic", + "weight": 0.8668695892814374 + }, + { + "text": "article", + "weight": 0.8604675436012976 + }, + { + "text": "playing", + "weight": 0.8584992674486526 + }, + { + "text": "project", + "weight": 0.8577634602326532 + }, + { + "text": "headline", + "weight": 0.8571046138334414 + }, + { + "text": "venture", + "weight": 0.8569466293500919 + }, + { + "text": "character", + "weight": 0.8564648013190839 + }, + { + "text": "writers", + "weight": 0.8559054574599632 + }, + { + "text": "act", + "weight": 0.8541954594033725 + }, + { + "text": "play", + "weight": 0.8540473424047181 + }, + { + "text": "title", + "weight": 0.8521043786323057 + }, + { + "text": "action", + "weight": 0.8516147580884164 + }, + { + "text": "voice", + "weight": 0.85124096320751 + }, + { + "text": "paper", + "weight": 0.8509334797970916 + }, + { + "text": "work", + "weight": 0.8508204202300914 + }, + { + "text": "youtube", + "weight": 0.8506460674832327 + }, + { + "text": "school", + "weight": 
0.8504256546445399 + } + ] + }, + { + "term": { + "text": "ideas", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "idea", + "weight": 10 + }, + { + "text": "tips", + "weight": 0.9009611247794362 + }, + { + "text": "concept", + "weight": 0.895375708176021 + }, + { + "text": "issues", + "weight": 0.8814746675662343 + }, + { + "text": "topics", + "weight": 0.8763918115220087 + }, + { + "text": "think", + "weight": 0.8717235567910049 + }, + { + "text": "articles", + "weight": 0.8705985708784312 + }, + { + "text": "questions", + "weight": 0.8655782609572623 + }, + { + "text": "insights", + "weight": 0.8572649447613849 + }, + { + "text": "technology", + "weight": 0.8555352142988235 + }, + { + "text": "research", + "weight": 0.8555291634829225 + }, + { + "text": "objective", + "weight": 0.8554360304769657 + }, + { + "text": "information", + "weight": 0.8538382760131212 + }, + { + "text": "queries", + "weight": 0.8533603181789858 + }, + { + "text": "intelligence", + "weight": 0.8515286329139787 + }, + { + "text": "things", + "weight": 0.8512206234435337 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "terminator", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "terminator", + "weight": 10 + }, + { + "text": "skynet", + "weight": 0.8612655359336829 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "terminator", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "terminator", + "weight": 10 + }, + { + "text": "skynet", + "weight": 0.8612655359336829 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": 
"topic", + "propertyValue": { + "term": { + "text": "terminator", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "terminator", + "weight": 10 + }, + { + "text": "skynet", + "weight": 0.8612655359336829 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "Summarize Adrian's ideas about the Terminator" + } + ], + "results": [ + { + "messageMatches": [ + 67, + 69, + 71 + ], + "entityMatches": [ + 330, + 340, + 350, + 1290, + 1301, + 1307, + 1289, + 1292 + ], + "topicMatches": [ + 1297, + 1304 + ], + "actionMatches": [ + 333, + 334, + 343, + 344, + 353, + 354, + 1302, + 1303, + 1293, + 1308, + 1309, + 1310, + 1311 + ] + } + ], + "cmd": "@kpSearch --query \"Summarize Adrian's ideas about the Terminator?\"" + }, + { + "searchText": "What are Adrian's interests and hobbies?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What are Adrian's interests and hobbies?", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "Adrian", + "isNamePronoun": false, + "facets": [ + { + "facetName": "interests", + "facetValue": "*" + }, + { + "facetName": "hobbies", + "facetValue": "*" + } + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "booleanOp": "or_max", + "terms": [ + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": { + "term": { + "text": "facet.name", + "weight": 10 + }, + "relatedTerms": [], + "relatedTermsRequired": true + }, + "propertyValue": { + "term": { + "text": "interests", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "interest", + "weight": 0.9236435075209032 + }, + { + "text": "economic interests", + "weight": 0.9073447699520951 + }, + { + "text": "serves_human_interests", + "weight": 
0.8701221453770587 + }, + { + "text": "motivations", + "weight": 0.8601345001594553 + }, + { + "text": "priorities", + "weight": 0.8591089848337814 + }, + { + "text": "interesting", + "weight": 0.8500804915077143 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": { + "term": { + "text": "facet.name", + "weight": 10 + }, + "relatedTerms": [], + "relatedTermsRequired": true + }, + "propertyValue": { + "term": { + "text": "hobbies", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "hobby", + "weight": 0.9498161745784214 + }, + { + "text": "spare time activities", + "weight": 0.8790391146879749 + }, + { + "text": "board games", + "weight": 0.8502480352308724 + } + ], + "relatedTermsRequired": true + } + } + ] + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "What are Adrian's interests and hobbies?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 12, + 88, + 5, + 11, + 86, + 6, + 0, + 78, + 81, + 99, + 82, + 104, + 14, + 83, + 18, + 16, + 19, + 21, + 20, + 15, + 102, + 91, + 75, + 40, + 87 + ], + "entityMatches": [ + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1434, + 1580, + 575, + 602, + 1448, + 1516, + 1547, + 1567 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595 + ] + } + ], + "cmd": "@kpSearch --query \"What are Adrian's interests and hobbies?\"" + }, + { + "searchText": "What are some of Adrian's interests?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What are some of Adrian's interests?", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "isInformational": true + }, + "searchTerms": [ + "interests" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "interests", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "interest", + "weight": 0.9236435075209032 + }, + { + "text": "economic interests", + "weight": 
0.9073447699520951 + }, + { + "text": "serves_human_interests", + "weight": 0.8701221453770587 + }, + { + "text": "motivations", + "weight": 0.8601345001594553 + }, + { + "text": "priorities", + "weight": 0.8591089848337814 + }, + { + "text": "interesting", + "weight": 0.8500804915077143 + } + ] + } + ] + } + } + ], + "rawQuery": "What are some of Adrian's interests?" + } + ], + "results": [ + { + "messageMatches": [ + 12, + 88, + 11, + 5, + 86, + 6, + 0, + 99, + 78, + 81, + 104, + 14, + 102, + 83, + 91, + 19, + 82, + 21, + 40, + 15, + 96, + 16, + 101, + 87, + 72 + ], + "entityMatches": [ + 575, + 602, + 1448, + 1516, + 1547, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 983, + 1434, + 1580, + 610, + 576, + 1146, + 1528, + 835, + 1176 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595, + 1156 + ], + "actionMatches": [ + 581, + 582, + 583, + 604, + 606, + 1520, + 1522, + 1550, + 1551, + 1554, + 1555, + 1571, + 1575, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 
204, + 208, + 213, + 214, + 218, + 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1440, + 1584, + 1589, + 668, + 673, + 1548, + 1552, + 550, + 555, + 1179, + 1183 + ] + } + ], + "cmd": "@kpSearch --query \"What are some of Adrian's interests?\"" + }, + { + "searchText": "What does Adrian paint?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What does Adrian paint?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "paint" + ], + "tense": "Present" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "paint", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "paint", + "weight": 10 + }, + { + "text": "painting", + "weight": 0.916028790334119 + }, + { + "text": "drawing", + "weight": 0.8869294518639369 + }, + { + "text": "draw", + "weight": 0.8826160908315798 + }, + { + "text": "paper", + "weight": 0.8752366218716299 + }, + { + "text": "picture", + "weight": 0.8670148500581228 + }, + { + "text": "render", + "weight": 0.8541070072956738 + }, + { + "text": "playing", + "weight": 0.8536792156348054 + } + ], + "relatedTermsRequired": true + } + }, + { + 
"term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + 
{ + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "paint", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "paint", + "weight": 10 + }, + { + "text": "painting", + "weight": 0.916028790334119 + }, + { + "text": "drawing", + "weight": 0.8869294518639369 + }, + { + "text": "draw", + "weight": 0.8826160908315798 + }, + { + "text": "paper", + "weight": 0.8752366218716299 + }, + { + "text": "picture", + "weight": 0.8670148500581228 + }, + { + "text": "render", + "weight": 0.8541070072956738 + }, + { + "text": "playing", + "weight": 0.8536792156348054 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + } + ], + "rawQuery": "What does Adrian paint?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 63, + 81, + 83 + ], + "entityMatches": [ + 310, + 400, + 410, + 311, + 312, + 401, + 402, + 411, + 412, + 1407, + 1216, + 1219 + ], + "topicMatches": [ + 1402, + 1414, + 1401, + 1413 + ], + "actionMatches": [ + 313, + 314, + 403, + 404, + 413, + 414, + 1411, + 1227, + 1234 + ] + } + ], + "cmd": "@kpSearch --query \"What does Adrian paint?\"" + }, + { + "searchText": "Who is Magnus Carlsen?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who is Magnus Carlsen?", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "Magnus Carlsen", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "booleanOp": "or_max", + "terms": [ + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "magnus carlsen", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "magnus carlsen", + "weight": 10 + }, + { + "text": "chess", + "weight": 0.8515312126686472 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "person", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "person", + "weight": 10 + }, + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 
0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ], + "relatedTermsRequired": true + } + } + ] + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "magnus carlsen", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "magnus carlsen", + "weight": 10 + }, + { + "text": "chess", + "weight": 0.8515312126686472 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "Who is Magnus Carlsen?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 40 + ], + "entityMatches": [ + 985 + ], + "topicMatches": [ + 1003 + ] + } + ], + "cmd": "@kpSearch --query \"Who is Magnus Carlsen?\"" + }, + { + "searchText": "What was the name of that chess grandmaster they discussed?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What was the name of the chess grandmaster they discussed?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "discussed" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "person" + ], + "facets": [ + { + "facetName": "profession", + "facetValue": "chess grandmaster" + } + ] + } + ], + "isInformational": true + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "discussed", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + 
"weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "profession", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "professions", + "weight": 0.9062057338955631 + }, + { + "text": "career", + "weight": 0.8843828439345859 + }, + { + "text": "job", + "weight": 0.8810864101145465 + }, + { + "text": "legal profession", + "weight": 0.8725351036569126 + }, + { + "text": "graduate", + "weight": 0.8717973313008607 + }, + { + "text": "school", + "weight": 0.8654285994731649 + }, + { + "text": "institution", + "weight": 0.8652247371164589 + }, + { + "text": "industry", + "weight": 0.8644360684955182 + }, + { + "text": "service", + "weight": 0.8554271438523046 + }, + { + "text": "task", + "weight": 0.8542360759338502 + }, + { + "text": "venture", + "weight": 0.8536072134695121 + }, + { + "text": "education", + "weight": 0.8533070872419408 + }, + { + 
"text": "study", + "weight": 0.8524462127925252 + }, + { + "text": "performance", + "weight": 0.8502792854058963 + }, + { + "text": "working", + "weight": 0.8501010491016996 + } + ] + }, + { + "term": { + "text": "chess grandmaster", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "human grandmasters", + "weight": 0.9119755253823068 + }, + { + "text": "chess", + "weight": 0.8989321678306585 + }, + { + "text": "chess computers", + "weight": 0.8952173403690811 + }, + { + "text": "magnus carlsen", + "weight": 0.8814657530086609 + } + ] + } + ] + } + } + ], + "rawQuery": "What was the name of the chess grandmaster they discussed?" + } + ], + "results": [ + { + "messageMatches": [ + 40, + 12, + 86, + 5, + 64, + 0, + 76, + 20, + 19, + 36, + 63, + 91, + 66, + 43, + 7, + 30, + 37, + 98, + 39, + 92, + 90, + 22, + 87, + 72, + 38 + ], + "entityMatches": [ + 641, + 985, + 662, + 0, + 1, + 2, + 5, + 6, + 7, + 10, + 11, + 12, + 15, + 16, + 17, + 20, + 21, + 22, + 25, + 26, + 27, + 30, + 31, + 32, + 35, + 36, + 37, + 40, + 41, + 42, + 45, + 46, + 47, + 50, + 51, + 52, + 55, + 56, + 57, + 60, + 61, + 62, + 65, + 66, + 67, + 70, + 71, + 72, + 75, + 76, + 77, + 80, + 81, + 82, + 85, + 86, + 87, + 90, + 91, + 92, + 95, + 96, + 97, + 100, + 101, + 102, + 105, + 106, + 107, + 110, + 111, + 112, + 115, + 116, + 117, + 120, + 121, + 122, + 125, + 126, + 127, + 130, + 131, + 132, + 135, + 136, + 137, + 140, + 141, + 142, + 145, + 146, + 147, + 150, + 151, + 152, + 155, + 156, + 157, + 160, + 161, + 162, + 165, + 166, + 167, + 170, + 171, + 172, + 175, + 176, + 177, + 180, + 181, + 182, + 185, + 186, + 187, + 190, + 191, + 192, + 195, + 196, + 197, + 200, + 201, + 202, + 205, + 206, + 207, + 210, + 211, + 212, + 215, + 216, + 217, + 220, + 221, + 222, + 225, + 226, + 227, + 230, + 231, + 232, + 235, + 236, + 237, + 240, + 241, + 242, + 245, + 246, + 247, + 250, + 251, + 252, + 255, + 256, + 257, + 260, + 261, + 262, + 265, + 266, + 267, + 270, + 271, + 272, + 275, + 276, + 277, + 
280, + 281, + 282, + 285, + 286, + 287, + 290, + 291, + 292, + 295, + 296, + 297, + 300, + 301, + 302, + 305, + 306, + 307, + 310, + 311, + 312, + 315, + 316, + 317, + 320, + 321, + 322, + 325, + 326, + 327, + 330, + 331, + 332, + 335, + 336, + 337, + 340, + 341, + 342, + 345, + 346, + 347, + 350, + 351, + 352, + 355, + 356, + 357, + 360, + 361, + 362, + 365, + 366, + 367, + 370, + 371, + 372, + 375, + 376, + 377, + 380, + 381, + 382, + 385, + 386, + 387, + 390, + 391, + 392, + 395, + 396, + 397, + 400, + 401, + 402, + 405, + 406, + 407, + 410, + 411, + 412, + 415, + 416, + 417, + 420, + 421, + 422, + 425, + 426, + 427, + 430, + 431, + 432, + 435, + 436, + 437, + 440, + 441, + 442, + 445, + 446, + 447, + 450, + 451, + 452, + 455, + 456, + 457, + 460, + 461, + 462, + 465, + 466, + 467, + 470, + 471, + 472, + 475, + 476, + 477, + 480, + 481, + 482, + 485, + 486, + 487, + 490, + 491, + 492, + 495, + 496, + 497, + 500, + 501, + 502, + 505, + 506, + 507, + 510, + 511, + 512, + 515, + 516, + 517, + 520, + 521, + 522, + 527, + 544, + 561, + 566, + 567, + 575, + 591, + 597, + 602, + 610, + 629, + 651, + 663, + 708, + 750, + 770, + 801, + 818, + 887, + 920, + 980, + 981, + 982, + 983, + 1031, + 1032, + 1047, + 1084, + 1132, + 1249, + 1288, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1385, + 1407, + 1434, + 1435, + 1448, + 1456, + 1463, + 1478, + 1516, + 1518, + 1567, + 1580, + 737, + 1419, + 1420, + 1421, + 1033, + 751, + 1146, + 1335, + 1384, + 986, + 706, + 707, + 966, + 1029, + 1066, + 1545, + 1546, + 630, + 711, + 712, + 886, + 1216, + 1219, + 1264, + 1346, + 1547, + 835, + 885, + 1200, + 1465, + 1466, + 681, + 819, + 820, + 833, + 834, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1290, + 1292, + 1315, + 1319, + 1480, + 1360, + 611, + 661, + 701, + 790, + 782, + 545, + 563, + 1113, + 1582, + 1049, + 859, + 1245, + 984, + 1005, + 864, + 576, + 1464, + 1199, + 1004, + 1106, + 1174, + 1197 + ], + "topicMatches": [ + 1391, + 1414, + 1432, + 1003, + 636, + 
705, + 745, + 897, + 746, + 678, + 704, + 730, + 898 + ], + "actionMatches": [ + 903, + 908, + 929, + 1007, + 1009, + 1010, + 1012, + 739, + 742, + 1116, + 995, + 997, + 549, + 554, + 772, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1387, + 1388, + 1411, + 1548, + 1552, + 843, + 1227, + 1234, + 1321, + 1323, + 1068, + 1070, + 1294, + 1295, + 996, + 889, + 892, + 1096, + 642, + 643, + 644, + 645, + 822, + 824, + 826, + 828, + 1309, + 703, + 992, + 993, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 654, + 655, + 656, + 657, + 1119, + 1123, + 666, + 671, + 682, + 683, + 740, + 741, + 743, + 744, + 840, + 841, + 844, + 1117, + 1120, + 1121, + 1201, + 1204, + 1488, + 1520, + 1522, + 888, + 904, + 1202, + 1205, + 1268, + 1274, + 1087, + 1269, + 1275, + 759, + 702, + 714, + 715, + 758, + 760, + 890, + 893, + 592, + 593, + 823, + 827, + 868, + 870, + 874, + 876, + 905, + 910, + 1178, + 1182, + 1485, + 548 + ] + } + ], + "cmd": "@kpSearch --query \"What was the name of that chess grandmaster they discussed?\"" + }, + { + "searchText": "What was Adrian's first book?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What was Adrian's first book?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "was" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "book", + "isNamePronoun": false, + "type": [ + "book" + ], + "facets": [ + { + "facetName": "first", + "facetValue": "*" + } + ] + } + ], + "isInformational": true + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "was", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "was", + "weight": 10 + }, + { + "text": "will", + "weight": 0.8852603638195126 + }, + { + "text": "thing", + 
"weight": 0.8844454967743176 + }, + { + "text": "ais", + "weight": 0.852630848947395 + }, + { + "text": "we", + "weight": 0.8501286734641239 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 
0.8709546703086021 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + 
"text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [] + }, + { + "term": { + "text": "first", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "last", + "weight": 0.9248515816290805 + }, + { + "text": "start", + "weight": 0.8522636760615245 + } + ] + } + ] + } + } + ], + "rawQuery": "What was Adrian's first book?" + } + ], + "results": [ + { + "messageMatches": [ + 12, + 5, + 6, + 86, + 11, + 88, + 0, + 91, + 75, + 99, + 15, + 25, + 16, + 23, + 102, + 19, + 104, + 26, + 22, + 34, + 40, + 74, + 17, + 24, + 14 + ], + "entityMatches": [ + 575, + 602, + 1448, + 1516, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1434, + 1580, + 1463, + 1547, + 801, + 1518, + 1464, + 1199, + 708, + 1, + 2, + 5, + 7, + 10, + 12, + 15, + 17, + 20, + 22, + 25, + 27, + 30, + 32, + 35, + 37, + 40, + 42, + 45, + 47, + 50, + 52, + 55, + 57, + 61, + 62, + 65, + 67, + 71, + 72, + 75, + 77, + 81, + 82, + 85, + 87, + 91, + 92, + 95, + 97, + 101, + 102, + 105, + 107, + 111, + 112, + 115, + 117, + 121, + 122, + 125, + 127, + 131, + 132, + 135, + 137, + 141, + 142, + 145, + 147, + 151, + 152, + 155, + 157, + 161, + 162, + 165, + 167, + 171, + 172, + 175, + 177, + 
181, + 182, + 185, + 187, + 191, + 192, + 195, + 197, + 201, + 202, + 205, + 207, + 211, + 212, + 215, + 217, + 221, + 222, + 225, + 227, + 231, + 232, + 235, + 237, + 241, + 242, + 245, + 247, + 251, + 252, + 255, + 257, + 261, + 262, + 265, + 267, + 271, + 272, + 275, + 277, + 281, + 282, + 285, + 287, + 291, + 292, + 295, + 297, + 301, + 302, + 305, + 307, + 311, + 312, + 315, + 317, + 321, + 322, + 325, + 327, + 331, + 332, + 335, + 337, + 341, + 342, + 345, + 347, + 351, + 352, + 355, + 357, + 361, + 362, + 365, + 367, + 371, + 372, + 375, + 377, + 381, + 382, + 385, + 387, + 391, + 392, + 395, + 397, + 401, + 402, + 405, + 407, + 411, + 412, + 415, + 417, + 421, + 422, + 425, + 427, + 430, + 432, + 435, + 437, + 440, + 442, + 445, + 447, + 450, + 452, + 455, + 457, + 460, + 462, + 465, + 467, + 470, + 472, + 475, + 477, + 480, + 482, + 485, + 487, + 490, + 492, + 495, + 497, + 500, + 502, + 505, + 507, + 510, + 512, + 515, + 517, + 520, + 522, + 544, + 561, + 566, + 591, + 597, + 629, + 641, + 651, + 663, + 750, + 770, + 818, + 887, + 920, + 980, + 981, + 982, + 985, + 1031, + 1032, + 1047, + 1084, + 1132, + 1249, + 1288, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1385, + 1407, + 1435, + 1456, + 1478, + 533, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 640, + 749, + 769, + 781, + 791, + 852, + 858, + 882, + 883, + 919, + 1198, + 1213, + 1331, + 1345, + 1468, + 1501, + 1565, + 738, + 576, + 1146, + 1335, + 1384, + 653, + 665, + 1333, + 706, + 707, + 966, + 1566, + 886, + 1066, + 1216, + 1219, + 1264, + 1346, + 751, + 835, + 885, + 1200, + 681, + 819, + 820, + 833, + 834, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1290, + 1292, + 1315, + 1319, + 1480, + 662, + 1360, + 545, + 563, + 1113, + 1582, + 1504, + 1049, + 800, + 712, + 1033, + 859, + 611, + 661, + 1081, + 652, + 1450, + 1484, + 680, + 687, + 1262, + 1383, + 1394, + 1395, + 984, + 1005, + 713, + 799, + 864, + 1465, + 1466, + 1004, + 1061, + 1174 + ], + "topicMatches": [ + 
585, + 609, + 532, + 1442, + 1595, + 541, + 573, + 763, + 788, + 1391, + 1414, + 1559, + 626, + 897, + 589, + 862, + 898, + 625, + 646, + 812, + 1381, + 660, + 1348, + 685, + 1577, + 540, + 594, + 659, + 676, + 736, + 747, + 761, + 786, + 811, + 1380, + 1443, + 596 + ], + "actionMatches": [ + 772, + 1551, + 1555, + 1520, + 1522, + 1571, + 1575, + 581, + 582, + 583, + 604, + 606, + 1550, + 1554, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 204, + 208, + 213, + 214, + 218, + 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1584, + 1589, + 903, + 908, + 929, + 1007, + 1009, + 1010, + 1012, + 535, + 536, + 537, + 538, + 539, + 752, + 753, + 754, + 773, + 776, + 793, + 794, + 795, + 740, + 741, + 743, + 744, + 1440, + 655, + 657, + 1089, + 1095, + 592, + 593, + 757, + 549, + 554, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1387, + 1388, + 1411, + 1548, + 1552, + 843, + 1227, + 1234, + 1321, + 1323, + 755, + 669, + 674, + 889, + 892, + 1096, + 642, + 643, + 644, + 645, + 822, + 824, + 826, + 828, + 1309, + 759, + 992, + 993, + 1085, + 1086, + 1091, + 1092, + 654, + 656, + 1119, + 1123, + 
890, + 893, + 682, + 683, + 739, + 742, + 840, + 841, + 844, + 1116, + 1117, + 1120, + 1121, + 1201, + 1204, + 1488, + 806, + 888, + 904, + 1202, + 1205, + 1268, + 1274, + 666, + 671, + 1088, + 1094, + 1087, + 1269, + 1275, + 691, + 839, + 632, + 1570, + 1574, + 758, + 760, + 721, + 729, + 805, + 807, + 1521, + 1523, + 823, + 827, + 868, + 870, + 874, + 876, + 905, + 910, + 1178, + 1182, + 1485, + 702, + 714, + 715, + 548 + ] + } + ], + "cmd": "@kpSearch --query \"What was Adrian's first book?\"" + }, + { + "searchText": "List all novels published in 2008", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "List all novels published in 2008", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "novel" + ], + "facets": [ + { + "facetName": "publication year", + "facetValue": "2008" + } + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "booleanOp": "or_max", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "novel", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "novel", + "weight": 10 + }, + { + "text": "novels", + "weight": 0.9406746363545382 + }, + { + "text": "fiction", + "weight": 0.8675137710740498 + }, + { + "text": "book", + "weight": 0.8664136885729098 + }, + { + "text": "sci-fi novels", + "weight": 0.8638489276404201 + }, + { + "text": "another book", + "weight": 0.8599596655070945 + }, + { + "text": "science fiction novels", + "weight": 0.854323770283607 + }, + { + "text": "fantasy novels", + "weight": 0.8525247926760322 + }, + { + "text": "book series", + "weight": 0.8514328625189412 + }, + { + "text": "literature", + "weight": 0.85100465991343 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": { + "term": { + "text": "publication year", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "year", 
+ "weight": 0.870155745046179 + } + ], + "relatedTermsRequired": true + }, + "propertyValue": { + "term": { + "text": "2008", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "2008", + "weight": 10 + }, + { + "text": "1980s", + "weight": 0.8547312176000872 + } + ], + "relatedTermsRequired": true + } + } + ] + }, + { + "propertyName": { + "term": { + "text": "2008", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "2008", + "weight": 10 + }, + { + "text": "1980s", + "weight": 0.8547312176000872 + } + ], + "relatedTermsRequired": true + }, + "propertyValue": { + "term": { + "text": "topic", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "topic", + "weight": 10 + }, + { + "text": "topics", + "weight": 10 + }, + { + "text": "subject", + "weight": 0.9244286516272775 + }, + { + "text": "title", + "weight": 0.9172125632827948 + }, + { + "text": "headline", + "weight": 0.9063936787745248 + }, + { + "text": "article", + "weight": 0.8987077901844163 + }, + { + "text": "articles", + "weight": 0.8852519425963353 + }, + { + "text": "concept", + "weight": 0.8830720299308233 + }, + { + "text": "issues", + "weight": 0.8789811032732467 + }, + { + "text": "story", + "weight": 0.8740800471468706 + }, + { + "text": "genre", + "weight": 0.8738597675354443 + }, + { + "text": "objective", + "weight": 0.8726506328424951 + }, + { + "text": "idea", + "weight": 0.8713625199275622 + }, + { + "text": "interest", + "weight": 0.8705433302959388 + }, + { + "text": "content", + "weight": 0.8700402772944661 + }, + { + "text": "context", + "weight": 0.8659783051183786 + }, + { + "text": "talk", + "weight": 0.8651888538238937 + }, + { + "text": "comment", + "weight": 0.8640479131852651 + }, + { + "text": "entity", + "weight": 0.8631240831976681 + }, + { + "text": "task", + "weight": 0.8629106911905021 + }, + { + "text": "stories", + "weight": 0.8622411349432463 + }, + { + "text": "attention", + "weight": 0.861751423606013 + }, + { + "text": "question", + "weight": 0.861229054169688 + 
}, + { + "text": "paper", + "weight": 0.8609014719580561 + }, + { + "text": "questions", + "weight": 0.8605757119049169 + }, + { + "text": "movie", + "weight": 0.8604859705769485 + }, + { + "text": "feature", + "weight": 0.8586640763250176 + }, + { + "text": "fiction", + "weight": 0.857902088165245 + }, + { + "text": "issue", + "weight": 0.8578367845524639 + }, + { + "text": "project", + "weight": 0.8577814571246507 + }, + { + "text": "event", + "weight": 0.8573353488820965 + }, + { + "text": "activity", + "weight": 0.8551864120361459 + }, + { + "text": "stuff", + "weight": 0.8550900722810694 + }, + { + "text": "job", + "weight": 0.8549337715752651 + }, + { + "text": "tips", + "weight": 0.8546512624601585 + }, + { + "text": "episode", + "weight": 0.8544778516134902 + }, + { + "text": "action", + "weight": 0.8544739262402108 + }, + { + "text": "picture", + "weight": 0.8544639302645739 + }, + { + "text": "mention", + "weight": 0.8536847758494411 + }, + { + "text": "impact", + "weight": 0.8535074406761329 + }, + { + "text": "sites", + "weight": 0.8528474712582808 + }, + { + "text": "object", + "weight": 0.8527627098598423 + }, + { + "text": "technology", + "weight": 0.8520606658601805 + }, + { + "text": "view", + "weight": 0.852020718657536 + }, + { + "text": "queries", + "weight": 0.8516179057270591 + }, + { + "text": "purpose", + "weight": 0.8514263006873108 + }, + { + "text": "problem", + "weight": 0.8513422460870528 + }, + { + "text": "author", + "weight": 0.8510317626555526 + }, + { + "text": "published", + "weight": 0.8509244144956545 + }, + { + "text": "goal", + "weight": 0.8506191378690882 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "List all novels published in 2008" + } + ], + "results": [ + { + "messageMatches": [ + 12, + 91, + 48, + 15 + ], + "entityMatches": [ + 612, + 1464, + 1081, + 641 + ] + } + ], + "cmd": "@kpSearch --query \"List all novels published in 2008\"" + }, + { + "searchText": "What points did they make 
about Skynet?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What points did they make about Skynet?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "make" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "Skynet", + "isNamePronoun": false + } + ], + "isInformational": false + }, + "searchTerms": [ + "points" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "make", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "make", + "weight": 10 + }, + { + "text": "produce", + "weight": 0.8856687807352839 + }, + { + "text": "create", + "weight": 0.8855465279479602 + }, + { + "text": "make seem", + "weight": 0.8816574027206837 + }, + { + "text": "build", + "weight": 0.8751778328313942 + }, + { + "text": "generate", + "weight": 0.8691822704169274 + }, + { + "text": "be made by", + "weight": 0.8663627895527255 + }, + { + "text": "do", + "weight": 0.8649313278879869 + }, + { + "text": "make better", + "weight": 0.8595175402363454 + }, + { + "text": "take", + "weight": 0.8589000129351847 + }, + { + "text": "get", + "weight": 0.8569984706087905 + }, + { + "text": "fix", + "weight": 0.8521057014407992 + }, + { + "text": "try", + "weight": 0.8509204378552674 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": 
"artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ] + }, + { + "term": { + "text": "points", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "tips", + "weight": 0.8793746333964906 + }, + { + "text": "steps", + "weight": 0.8590766730978444 + }, + { + "text": "sites", + "weight": 0.855871219716682 + }, + { + "text": "position", + "weight": 0.8553868685680567 + }, + { + "text": "topics", + "weight": 0.854986467931403 + }, + { + "text": "issues", + "weight": 0.8545891723550025 + }, + { + "text": "quotes", + "weight": 0.8519855150123115 + }, + { + "text": "articles", + "weight": 0.8516604019693829 + }, + { + "text": "links", + "weight": 0.8501315936866392 + } + ] + } + ] + } + } + ], + "rawQuery": "What points did they make about Skynet?" + } + ], + "results": [ + { + "messageMatches": [ + 67, + 69, + 71, + 51, + 91, + 55, + 41, + 63, + 64, + 59, + 73, + 47, + 53, + 7, + 14, + 61, + 66, + 37, + 5, + 96, + 88 + ], + "entityMatches": [ + 1290, + 1301, + 1307, + 1106, + 1466, + 1131, + 1004, + 1220, + 1245, + 1219, + 1163, + 1315, + 1065, + 1114, + 1107, + 1192, + 941, + 1061, + 567, + 1517 + ], + "topicMatches": [ + 1297, + 1304, + 595, + 1477, + 1244, + 1241, + 1167, + 1324, + 1329, + 960, + 572, + 1455 + ], + "actionMatches": [ + 1293, + 1302, + 1303, + 1308, + 1309, + 1310, + 1311, + 592, + 593, + 1108, + 1109, + 634, + 1133, + 1135, + 1136, + 1138, + 1007, + 1010, + 1227, + 1234, + 1267, + 1273, + 1226, + 1233, + 1225, + 1232, + 1067, + 1068, + 1069, + 1070, + 1117, + 1118, + 1121, + 1122 + ] + } + ], + "cmd": "@kpSearch --query \"What points did they make about Skynet?\"" + }, + { + "searchText": "How long did Adrian struggle before he got published?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "How long did Adrian struggle before he got published?", + "filters": [ + { + "actionSearchTerm": { + 
"actionVerbs": { + "words": [ + "struggle" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "published", + "isNamePronoun": false, + "type": [ + "book", + "article" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "struggle", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "struggle", + "weight": 10 + }, + { + "text": "be struggled by", + "weight": 0.8967251422709877 + }, + { + "text": "overcome", + "weight": 0.8645852041710799 + }, + { + "text": "effort", + "weight": 0.862557222524668 + }, + { + "text": "clash", + "weight": 0.8624566833487255 + }, + { + "text": "war", + "weight": 0.8523889281663883 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.874395949048388 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": 
"kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.855980794294105 + }, + { + "text": "publisher", + "weight": 0.9238584908072488 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8564355738986965 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "published", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "publish", + "weight": 0.9466901414729567 + }, + { + "text": "publishing", + "weight": 0.9068855980535531 + }, + { + "text": "be published by", + "weight": 0.9027394184999376 + }, + { + "text": "getting published", + "weight": 0.8973877121812881 + }, + { + "text": "articles", + "weight": 10 + }, + { + "text": "publishers", + "weight": 0.8842397385377393 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "being tried to get published", + "weight": 0.8762253147817652 + }, + { + "text": "trying to get published", + "weight": 0.8741016640934969 + }, + { + "text": "authors", + "weight": 0.868086074894639 + }, + { + "text": "headline", + "weight": 0.8932503111528619 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "research", + 
"weight": 0.8627977163720204 + }, + { + "text": "produce", + "weight": 0.8619670263956379 + }, + { + "text": "issues", + "weight": 0.8602403096787199 + }, + { + "text": "books", + "weight": 10 + }, + { + "text": "writing", + "weight": 0.8588394156795787 + }, + { + "text": "working", + "weight": 0.8573335334005833 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "performance", + "weight": 0.8549662438120151 + }, + { + "text": "publishing industry", + "weight": 0.8546768096288546 + }, + { + "text": "journalistic articles", + "weight": 0.8745630821861639 + }, + { + "text": "playing", + "weight": 0.8537233089691992 + }, + { + "text": "project", + "weight": 0.8529689373494886 + }, + { + "text": "writers", + "weight": 0.8520978851873733 + }, + { + "text": "topic", + "weight": 0.8987077901844163 + }, + { + "text": "title", + "weight": 0.8763215034012426 + }, + { + "text": "submit", + "weight": 0.8502574806455532 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8694127319933437 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8614043398361213 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { + "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent 
book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.8536612626669662 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + }, + { + "term": { + "text": "article", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "topics", + "weight": 0.8776182138003489 + }, + { + "text": "episode", + "weight": 0.8699461125840646 + }, + { + "text": "content", + "weight": 0.8691347125182822 + }, + { + "text": "comment", + "weight": 0.8652610608750504 + }, + { + "text": "10000 articles", + "weight": 0.8610997948200644 + }, + { + "text": "programs writing articles", + "weight": 0.8593631942715887 + }, + { + "text": "artifact", + "weight": 0.8591300699739347 + }, + { + "text": "quality of articles", + "weight": 0.8571839678502824 + }, + { + "text": "event", + "weight": 0.8560015230672905 + }, + { + "text": "question", + "weight": 0.8551006842288194 + }, + { + "text": "concept", + "weight": 0.8548819358015549 + }, + { + "text": "action", + "weight": 0.8548231760565069 + }, + { + "text": "feature", + "weight": 0.8538664428186585 + }, + { + "text": "talk", + "weight": 0.8533890081965977 + }, + { + "text": "idea", + "weight": 0.8522854243283808 + }, + { + "text": "task", + "weight": 0.8500593947587631 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "struggle", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "struggle", + "weight": 10 + }, + { + "text": "be struggled by", + "weight": 0.8967251422709877 + }, + { + "text": "overcome", + "weight": 0.8645852041710799 + }, + { + "text": "effort", + 
"weight": 0.862557222524668 + }, + { + "text": "clash", + "weight": 0.8624566833487255 + }, + { + "text": "war", + "weight": 0.8523889281663883 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "published", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "published", + "weight": 10 + }, + { + "text": "publish", + "weight": 0.9466901414729567 + }, + { + "text": "publisher", + "weight": 0.9238584908072488 + }, + { + "text": "publishing", + "weight": 0.9068855980535531 + }, + { + "text": "be published by", + "weight": 0.9027394184999376 + }, + { + "text": "getting published", + "weight": 0.8973877121812881 + }, + { + "text": "articles", + "weight": 0.8914377795230433 + }, + { + "text": "publishers", + "weight": 0.8842397385377393 + }, + { + "text": "article", + "weight": 0.8835674420920409 + }, + { + "text": "paper", + "weight": 0.8799453192520594 + }, + { + "text": "being tried to get published", + "weight": 0.8762253147817652 + }, + { + "text": "trying to get published", + "weight": 0.8741016640934969 + }, + { + "text": "authors", + "weight": 0.868086074894639 + }, + { + "text": "headline", + "weight": 0.8658230808532892 + }, + { + "text": "fiction", + "weight": 0.8651379856109854 + }, + { + "text": "writer", + "weight": 0.8638901503179188 + }, + { + "text": "research", + "weight": 0.8627977163720204 + }, + { + "text": "book", + "weight": 0.8625212802008864 + }, + { + "text": "produce", + "weight": 0.8619670263956379 + }, + { + "text": "author", + "weight": 0.861507626216192 + }, + { + "text": "issues", + "weight": 0.8602403096787199 + }, + { + "text": "books", + "weight": 0.8588698012348513 + }, + { + "text": "writing", + "weight": 0.8588394156795787 + }, + { + "text": "working", + "weight": 0.8573335334005833 + }, + { + "text": "book release", + "weight": 0.8564633978228342 + }, + { + "text": "performance", + "weight": 0.8549662438120151 + }, + { 
+ "text": "publishing industry", + "weight": 0.8546768096288546 + }, + { + "text": "journalistic articles", + "weight": 0.8541782473443984 + }, + { + "text": "present", + "weight": 0.854134962194466 + }, + { + "text": "playing", + "weight": 0.8537233089691992 + }, + { + "text": "project", + "weight": 0.8529689373494886 + }, + { + "text": "writers", + "weight": 0.8520978851873733 + }, + { + "text": "topic", + "weight": 0.8509244144956545 + }, + { + "text": "issue", + "weight": 0.8507739085345553 + }, + { + "text": "title", + "weight": 0.8502896481513844 + }, + { + "text": "submit", + "weight": 0.8502574806455532 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "published", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "published", + "weight": 10 + }, + { + "text": "publish", + "weight": 0.9466901414729567 + }, + { + "text": "publisher", + "weight": 0.9238584908072488 + }, + { + "text": "publishing", + "weight": 0.9068855980535531 + }, + { + "text": "be published by", + "weight": 0.9027394184999376 + }, + { + "text": "getting published", + "weight": 0.8973877121812881 + }, + { + "text": "articles", + "weight": 0.8914377795230433 + }, + { + "text": "publishers", + "weight": 0.8842397385377393 + }, + { + "text": "article", + "weight": 0.8835674420920409 + }, + { + "text": "paper", + "weight": 0.8799453192520594 + }, + { + "text": "being tried to get published", + "weight": 0.8762253147817652 + }, + { + "text": "trying to get published", + "weight": 0.8741016640934969 + }, + { + "text": "authors", + "weight": 0.868086074894639 + }, + { + "text": "headline", + "weight": 0.8658230808532892 + }, + { + "text": "fiction", + "weight": 0.8651379856109854 + }, + { + "text": "writer", + "weight": 0.8638901503179188 + }, + { + "text": "research", + "weight": 0.8627977163720204 + }, + { + "text": "book", + "weight": 0.8625212802008864 + }, + { + "text": "produce", + "weight": 0.8619670263956379 + }, 
+ { + "text": "author", + "weight": 0.861507626216192 + }, + { + "text": "issues", + "weight": 0.8602403096787199 + }, + { + "text": "books", + "weight": 0.8588698012348513 + }, + { + "text": "writing", + "weight": 0.8588394156795787 + }, + { + "text": "working", + "weight": 0.8573335334005833 + }, + { + "text": "book release", + "weight": 0.8564633978228342 + }, + { + "text": "performance", + "weight": 0.8549662438120151 + }, + { + "text": "publishing industry", + "weight": 0.8546768096288546 + }, + { + "text": "journalistic articles", + "weight": 0.8541782473443984 + }, + { + "text": "present", + "weight": 0.854134962194466 + }, + { + "text": "playing", + "weight": 0.8537233089691992 + }, + { + "text": "project", + "weight": 0.8529689373494886 + }, + { + "text": "writers", + "weight": 0.8520978851873733 + }, + { + "text": "topic", + "weight": 0.8509244144956545 + }, + { + "text": "issue", + "weight": 0.8507739085345553 + }, + { + "text": "title", + "weight": 0.8502896481513844 + }, + { + "text": "submit", + "weight": 0.8502574806455532 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "published", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "published", + "weight": 10 + }, + { + "text": "publish", + "weight": 0.9466901414729567 + }, + { + "text": "publisher", + "weight": 0.9238584908072488 + }, + { + "text": "publishing", + "weight": 0.9068855980535531 + }, + { + "text": "be published by", + "weight": 0.9027394184999376 + }, + { + "text": "getting published", + "weight": 0.8973877121812881 + }, + { + "text": "articles", + "weight": 0.8914377795230433 + }, + { + "text": "publishers", + "weight": 0.8842397385377393 + }, + { + "text": "article", + "weight": 0.8835674420920409 + }, + { + "text": "paper", + "weight": 0.8799453192520594 + }, + { + "text": "being tried to get published", + "weight": 0.8762253147817652 + }, + { + "text": "trying to get published", + "weight": 
0.8741016640934969 + }, + { + "text": "authors", + "weight": 0.868086074894639 + }, + { + "text": "headline", + "weight": 0.8658230808532892 + }, + { + "text": "fiction", + "weight": 0.8651379856109854 + }, + { + "text": "writer", + "weight": 0.8638901503179188 + }, + { + "text": "research", + "weight": 0.8627977163720204 + }, + { + "text": "book", + "weight": 0.8625212802008864 + }, + { + "text": "produce", + "weight": 0.8619670263956379 + }, + { + "text": "author", + "weight": 0.861507626216192 + }, + { + "text": "issues", + "weight": 0.8602403096787199 + }, + { + "text": "books", + "weight": 0.8588698012348513 + }, + { + "text": "writing", + "weight": 0.8588394156795787 + }, + { + "text": "working", + "weight": 0.8573335334005833 + }, + { + "text": "book release", + "weight": 0.8564633978228342 + }, + { + "text": "performance", + "weight": 0.8549662438120151 + }, + { + "text": "publishing industry", + "weight": 0.8546768096288546 + }, + { + "text": "journalistic articles", + "weight": 0.8541782473443984 + }, + { + "text": "present", + "weight": 0.854134962194466 + }, + { + "text": "playing", + "weight": 0.8537233089691992 + }, + { + "text": "project", + "weight": 0.8529689373494886 + }, + { + "text": "writers", + "weight": 0.8520978851873733 + }, + { + "text": "topic", + "weight": 0.8509244144956545 + }, + { + "text": "issue", + "weight": 0.8507739085345553 + }, + { + "text": "title", + "weight": 0.8502896481513844 + }, + { + "text": "submit", + "weight": 0.8502574806455532 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "How long did Adrian struggle before he got published?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 96, + 37, + 63, + 6, + 86, + 1, + 5, + 23, + 25, + 79, + 15, + 99, + 45, + 31, + 27, + 47, + 17, + 21, + 77 + ], + "entityMatches": [ + 575, + 1516, + 0, + 21, + 26, + 70, + 80, + 100, + 110, + 120, + 130, + 150, + 180, + 220, + 230, + 310, + 380, + 390, + 426, + 476, + 491, + 567, + 1434, + 1049, + 1547, + 801, + 1518, + 708, + 1, + 2, + 20, + 22, + 25, + 27, + 71, + 72, + 81, + 82, + 101, + 102, + 111, + 112, + 121, + 122, + 131, + 132, + 151, + 152, + 181, + 182, + 221, + 222, + 231, + 232, + 311, + 312, + 381, + 382, + 391, + 392, + 425, + 427, + 475, + 477, + 490, + 492, + 641, + 663, + 750, + 1047, + 1385, + 1435, + 1065, + 533, + 568, + 569, + 577, + 640, + 749, + 781, + 858, + 1213, + 576, + 751, + 665, + 706, + 707, + 1066, + 1545, + 1546, + 941, + 1216, + 1219, + 662, + 1360, + 712, + 800, + 713, + 799, + 859, + 661, + 1218, + 1438, + 1517, + 1519, + 782, + 1061, + 1384, + 1383 + ], + "topicMatches": [ + 585, + 1442, + 541, + 573, + 763, + 788, + 1391, + 648, + 762, + 787, + 1559, + 765, + 589, + 1057, + 862, + 767, + 646, + 812, + 1073, + 540, + 676, + 736, + 761, + 786, + 811, + 1443, + 1076, + 1390, + 1370 + ], + "actionMatches": [ + 1520, + 1551, + 1555, + 1522, + 536, + 753, + 581, + 582, + 583, + 1550, + 1554, + 3, + 4, + 23, + 28, + 73, + 74, + 83, + 84, + 103, + 104, + 113, + 114, + 123, + 124, + 133, + 134, + 153, + 154, + 183, + 184, + 223, + 224, + 233, + 234, + 313, + 314, + 383, + 384, + 393, + 394, + 428, + 478, + 493, + 570, + 571, + 1067, + 1069, + 535, + 537, + 538, + 539, + 752, + 754, + 1068, + 1070, + 1440, + 643, + 645, + 1227, + 1234, + 759, + 1387, + 1388, + 1548, + 1552, + 757, + 755, + 669, + 674, + 1052, + 1053, + 1054, + 642, + 644, + 1521, + 1523, + 1050, + 946, + 951, + 1051, + 806, + 1225, + 1232, + 721, + 729, + 805, + 807, + 758, + 760, + 666, + 671, + 785, + 714, + 715 + ] + } + ], + "cmd": "@kpSearch --query \"How long did Adrian struggle before he got published?\"" + }, 
+ { + "searchText": "Who mentioned the 'University of Reading'?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who mentioned the 'University of Reading'?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "mentioned" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "University of Reading", + "isNamePronoun": false, + "type": [ + "institution" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "mentioned", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say" + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "university of reading", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "university", + "weight": 0.8704681581471554 + } + ] + }, + { + "term": { + "text": "institution", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "school", + "weight": 0.8958646384053154 + }, + { + "text": "education", + "weight": 0.8701771685545058 + }, + { + "text": "profession", + "weight": 0.8652247371164589 + }, + { + "text": "organ", + "weight": 0.8652067700006192 + }, + { + "text": "culture", + "weight": 0.8643547045363144 + }, + { + "text": "systems", + "weight": 0.8631429637243371 + }, + { + "text": "system", + "weight": 0.8618195686893446 + }, + { + "text": "company", + "weight": 0.8576264613494663 + }, + { + "text": "industry", + "weight": 0.8561042913986937 + }, + { + "text": "sites", + "weight": 0.8549982312837364 + }, + { + "text": "entity", + "weight": 0.854674026809821 + }, + { + "text": "service", + "weight": 0.8545190962182285 + }, + { + "text": "study", + "weight": 0.8514522647520371 + } + ] + } + ] + } + } + ], + "rawQuery": "Who mentioned the 'University of Reading'?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 20, + 15, + 21, + 39, + 22, + 12, + 64, + 25, + 53, + 63, + 17, + 66, + 26, + 28, + 37, + 74, + 62, + 23, + 93, + 65, + 34, + 67, + 84, + 29, + 60 + ], + "entityMatches": [ + 611, + 701, + 661, + 751, + 790, + 662, + 737, + 1419, + 1420, + 1421, + 968, + 1335, + 1292, + 821, + 1030, + 1176, + 1214, + 1215, + 1220, + 1248, + 1301, + 545, + 563, + 1113, + 1582, + 782, + 1131, + 681, + 819, + 820, + 833, + 834, + 835, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1290, + 1315, + 1319, + 1480, + 1245, + 641, + 1197, + 544, + 561, + 1199 + ], + "topicMatches": [ + 678, + 704, + 730 + ], + "actionMatches": [ + 702, + 666, + 671, + 1203, + 1206, + 1270, + 1276, + 1294, + 1295, + 1035, + 1038, + 1180, + 1184, + 1266, + 1268, + 1272, + 1274, + 1133, + 1135, + 1136, + 1138, + 714, + 715 + ] + } + ], + "cmd": "@kpSearch --query \"Who mentioned the 'University of Reading'?\"" + }, + { + "searchText": "Where did Adrian study?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Where did Adrian study?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "study" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Adrian", + "isNamePronoun": false + } + ], + "isInformational": true + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "study", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "study", + "weight": 10 + }, + { + "text": "research", + "weight": 0.9114409863936512 + }, + { + "text": "studied", + "weight": 0.9103561389052532 + }, + { + "text": "school", + "weight": 0.9079925379332335 + }, + { + "text": "practice", + "weight": 0.9027146067556441 + }, + { + "text": "learn", + "weight": 0.893695599631318 + }, + { + "text": "learning", + "weight": 0.8794088946937061 + }, + { + "text": "work", 
+ "weight": 0.8784604581563951 + }, + { + "text": "education", + "weight": 0.8706472259486062 + }, + { + "text": "reading", + "weight": 0.8697260937760158 + }, + { + "text": "graduate", + "weight": 0.8663067052439166 + }, + { + "text": "test", + "weight": 0.8646195754647529 + }, + { + "text": "job", + "weight": 0.862108727508981 + }, + { + "text": "working", + "weight": 0.8616053383143865 + }, + { + "text": "playing", + "weight": 0.8612889776517374 + }, + { + "text": "be studied by", + "weight": 0.8611824320433201 + }, + { + "text": "watch", + "weight": 0.8609701179411657 + }, + { + "text": "play", + "weight": 0.8591912698087716 + }, + { + "text": "performance", + "weight": 0.8571402908487098 + }, + { + "text": "read", + "weight": 0.8568311262637076 + }, + { + "text": "talk", + "weight": 0.8548672662613528 + }, + { + "text": "task", + "weight": 0.8537984164784945 + }, + { + "text": "career", + "weight": 0.8536716575448631 + }, + { + "text": "look", + "weight": 0.8531697466224798 + }, + { + "text": "books", + "weight": 0.8529481424660358 + }, + { + "text": "subject", + "weight": 0.8526252657885129 + }, + { + "text": "profession", + "weight": 0.8524462127925252 + }, + { + "text": "law", + "weight": 0.8517094869029367 + }, + { + "text": "institution", + "weight": 0.8514522647520371 + }, + { + "text": "paper", + "weight": 0.8508476820774012 + }, + { + "text": "attend", + "weight": 0.8507123525408427 + }, + { + "text": "book", + "weight": 0.8505012638554674 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "adrian", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "adrian tchaikovsky" + } + ] + } + ] + } + } + ], + "rawQuery": "Where did Adrian study?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 12, + 20, + 11, + 86, + 5, + 6, + 88, + 0, + 21, + 104, + 91, + 22, + 16, + 99, + 75, + 102, + 40, + 15, + 10, + 14, + 32, + 87, + 72, + 38, + 46 + ], + "entityMatches": [ + 575, + 602, + 1448, + 1516, + 1547, + 1567, + 0, + 6, + 11, + 16, + 21, + 26, + 31, + 36, + 41, + 46, + 51, + 56, + 60, + 66, + 70, + 76, + 80, + 86, + 90, + 96, + 100, + 106, + 110, + 116, + 120, + 126, + 130, + 136, + 140, + 146, + 150, + 156, + 160, + 166, + 170, + 176, + 180, + 186, + 190, + 196, + 200, + 206, + 210, + 216, + 220, + 226, + 230, + 236, + 240, + 246, + 250, + 256, + 260, + 266, + 270, + 276, + 280, + 286, + 290, + 296, + 300, + 306, + 310, + 316, + 320, + 326, + 330, + 336, + 340, + 346, + 350, + 356, + 360, + 366, + 370, + 376, + 380, + 386, + 390, + 396, + 400, + 406, + 410, + 416, + 420, + 426, + 431, + 436, + 441, + 446, + 451, + 456, + 461, + 466, + 471, + 476, + 481, + 486, + 491, + 496, + 501, + 506, + 511, + 516, + 521, + 527, + 567, + 610, + 983, + 1434, + 1580, + 576 + ], + "topicMatches": [ + 585, + 609, + 532, + 1442, + 1595, + 780, + 779, + 678, + 704, + 730, + 646, + 812, + 1381, + 1390, + 636, + 705, + 745, + 541, + 573, + 763, + 788, + 735 + ], + "actionMatches": [ + 581, + 582, + 583, + 604, + 606, + 1520, + 1522, + 1550, + 1551, + 1554, + 1555, + 1571, + 1575, + 3, + 4, + 8, + 13, + 18, + 23, + 28, + 33, + 38, + 43, + 48, + 53, + 58, + 63, + 64, + 68, + 73, + 74, + 78, + 83, + 84, + 88, + 93, + 94, + 98, + 103, + 104, + 108, + 113, + 114, + 118, + 123, + 124, + 128, + 133, + 134, + 138, + 143, + 144, + 148, + 153, + 154, + 158, + 163, + 164, + 168, + 173, + 174, + 178, + 183, + 184, + 188, + 193, + 194, + 198, + 203, + 204, + 208, + 213, + 214, + 218, + 223, + 224, + 228, + 233, + 234, + 238, + 243, + 244, + 248, + 253, + 254, + 258, + 263, + 264, + 268, + 273, + 274, + 278, + 283, + 284, + 288, + 293, + 294, + 298, + 303, + 304, + 308, + 313, + 314, + 318, + 323, + 324, + 328, + 333, + 334, + 338, + 343, + 
344, + 348, + 353, + 354, + 358, + 363, + 364, + 368, + 373, + 374, + 378, + 383, + 384, + 388, + 393, + 394, + 398, + 403, + 404, + 408, + 413, + 414, + 418, + 423, + 424, + 428, + 433, + 438, + 443, + 448, + 453, + 458, + 463, + 468, + 473, + 478, + 483, + 488, + 493, + 498, + 503, + 508, + 513, + 518, + 523, + 528, + 529, + 570, + 571, + 618, + 619, + 1440, + 1584, + 1589 + ] + } + ], + "cmd": "@kpSearch --query \"Where did Adrian study?\"" + }, + { + "searchText": "What did Christina say about Skynet?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What did Christina say about Skynet?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "say" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Christina", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "Skynet", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + 
"text": "christina", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "christina warren" + } + ] + }, + { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "christina", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "christina warren" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + 
"relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "skynet", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "skynet", + "weight": 10 + }, + { + "text": "robot butlers", + "weight": 
0.865481606474423 + }, + { + "text": "terminator", + "weight": 0.8612655359336829 + }, + { + "text": "chatbots", + "weight": 0.8571796805393255 + }, + { + "text": "cyberpunk", + "weight": 0.8568927429046935 + }, + { + "text": "chatbot", + "weight": 0.8551971814217844 + }, + { + "text": "robots", + "weight": 0.8548758659311237 + }, + { + "text": "asimov", + "weight": 0.85422490355617 + }, + { + "text": "artificial intelligence", + "weight": 0.8542080117789144 + }, + { + "text": "robot behavior", + "weight": 0.852444634504962 + }, + { + "text": "sci-fi", + "weight": 0.8514184691138944 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What did Christina say about Skynet?" + } + ], + "results": [ + { + "messageMatches": [ + 5, + 88 + ], + "entityMatches": [ + 20, + 435, + 567 + ], + "topicMatches": [ + 572, + 1455 + ], + "actionMatches": [ + 23, + 24, + 438, + 439 + ] + } + ], + "cmd": "@kpSearch --query \"What did Christina say about Skynet?\"" + }, + { + "searchText": "Corvids? What the hell are they?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Corvids? What the hell are they?", + "filters": [ + { + "searchTerms": [ + "Corvids" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "term": { + "text": "corvids", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "non-corvid", + "weight": 0.9165769335299013 + }, + { + "text": "semi_created_corvids", + "weight": 0.9035755007931823 + }, + { + "text": "crows", + "weight": 0.8795190531205701 + }, + { + "text": "parallels between corvids and ai", + "weight": 0.8520058268712254 + } + ] + } + ] + } + } + ], + "rawQuery": "Corvids? What the hell are they?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 37, + 36, + 62 + ], + "entityMatches": [ + 941, + 921, + 1198, + 942 + ], + "topicMatches": [ + 955, + 933, + 939 + ], + "actionMatches": [ + 944, + 945, + 946, + 947, + 948, + 949, + 950, + 951, + 952, + 953, + 925, + 926, + 927, + 928, + 930 + ] + } + ], + "cmd": "@kpSearch --query \"Corvids? What the hell are they?\"" + }, + { + "searchText": "Who was that chess grandmaster we talked about?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who was that chess grandmaster we talked about?", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "person" + ], + "facets": [ + { + "facetName": "profession", + "facetValue": "chess grandmaster" + } + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "booleanOp": "or_max", + "terms": [ + { + "propertyName": "type", + "propertyValue": { + "term": { + "text": "person", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "person", + "weight": 10 + }, + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 
0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": { + "term": { + "text": "profession", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "profession", + "weight": 10 + }, + { + "text": "professions", + "weight": 0.9062057338955631 + }, + { + "text": "career", + "weight": 0.8843828439345859 + }, + { + "text": "job", + "weight": 0.8810864101145465 + }, + { + "text": "legal profession", + "weight": 0.8725351036569126 + }, + { + "text": "graduate", + "weight": 0.8717973313008607 + }, + { + "text": "school", + "weight": 0.8654285994731649 + }, + { + "text": "institution", + "weight": 0.8652247371164589 + }, + { + "text": "industry", + "weight": 0.8644360684955182 + }, + { + "text": "service", + "weight": 0.8554271438523046 + }, + { + "text": "task", + "weight": 0.8542360759338502 + }, + { + "text": "venture", + "weight": 
0.8536072134695121 + }, + { + "text": "education", + "weight": 0.8533070872419408 + }, + { + "text": "creator", + "weight": 0.8531249948821384 + }, + { + "text": "study", + "weight": 0.8524462127925252 + }, + { + "text": "performance", + "weight": 0.8502792854058963 + }, + { + "text": "working", + "weight": 0.8501010491016996 + } + ], + "relatedTermsRequired": true + }, + "propertyValue": { + "term": { + "text": "chess grandmaster", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "human grandmasters", + "weight": 0.9119755253823068 + }, + { + "text": "chess", + "weight": 0.8989321678306585 + }, + { + "text": "chess computers", + "weight": 0.8952173403690811 + }, + { + "text": "magnus carlsen", + "weight": 0.8814657530086609 + } + ], + "relatedTermsRequired": true + } + } + ] + }, + { + "propertyName": { + "term": { + "text": "chess grandmaster", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "human grandmasters", + "weight": 0.9119755253823068 + }, + { + "text": "chess", + "weight": 0.8989321678306585 + }, + { + "text": "chess computers", + "weight": 0.8952173403690811 + }, + { + "text": "magnus carlsen", + "weight": 0.8814657530086609 + } + ], + "relatedTermsRequired": true + }, + "propertyValue": { + "term": { + "text": "topic", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "topic", + "weight": 10 + }, + { + "text": "topics", + "weight": 10 + }, + { + "text": "subject", + "weight": 0.9244286516272775 + }, + { + "text": "title", + "weight": 0.9172125632827948 + }, + { + "text": "headline", + "weight": 0.9063936787745248 + }, + { + "text": "article", + "weight": 0.8987077901844163 + }, + { + "text": "articles", + "weight": 0.8852519425963353 + }, + { + "text": "concept", + "weight": 0.8830720299308233 + }, + { + "text": "issues", + "weight": 0.8789811032732467 + }, + { + "text": "story", + "weight": 0.8740800471468706 + }, + { + "text": "genre", + "weight": 0.8738597675354443 + }, + { + "text": "objective", + "weight": 0.8726506328424951 + }, + 
{ + "text": "idea", + "weight": 0.8713625199275622 + }, + { + "text": "interest", + "weight": 0.8705433302959388 + }, + { + "text": "content", + "weight": 0.8700402772944661 + }, + { + "text": "context", + "weight": 0.8659783051183786 + }, + { + "text": "talk", + "weight": 0.8651888538238937 + }, + { + "text": "comment", + "weight": 0.8640479131852651 + }, + { + "text": "entity", + "weight": 0.8631240831976681 + }, + { + "text": "task", + "weight": 0.8629106911905021 + }, + { + "text": "stories", + "weight": 0.8622411349432463 + }, + { + "text": "attention", + "weight": 0.861751423606013 + }, + { + "text": "question", + "weight": 0.861229054169688 + }, + { + "text": "paper", + "weight": 0.8609014719580561 + }, + { + "text": "questions", + "weight": 0.8605757119049169 + }, + { + "text": "movie", + "weight": 0.8604859705769485 + }, + { + "text": "feature", + "weight": 0.8586640763250176 + }, + { + "text": "fiction", + "weight": 0.857902088165245 + }, + { + "text": "issue", + "weight": 0.8578367845524639 + }, + { + "text": "project", + "weight": 0.8577814571246507 + }, + { + "text": "event", + "weight": 0.8573353488820965 + }, + { + "text": "activity", + "weight": 0.8551864120361459 + }, + { + "text": "stuff", + "weight": 0.8550900722810694 + }, + { + "text": "job", + "weight": 0.8549337715752651 + }, + { + "text": "tips", + "weight": 0.8546512624601585 + }, + { + "text": "episode", + "weight": 0.8544778516134902 + }, + { + "text": "action", + "weight": 0.8544739262402108 + }, + { + "text": "picture", + "weight": 0.8544639302645739 + }, + { + "text": "mention", + "weight": 0.8536847758494411 + }, + { + "text": "impact", + "weight": 0.8535074406761329 + }, + { + "text": "sites", + "weight": 0.8528474712582808 + }, + { + "text": "object", + "weight": 0.8527627098598423 + }, + { + "text": "technology", + "weight": 0.8520606658601805 + }, + { + "text": "view", + "weight": 0.852020718657536 + }, + { + "text": "queries", + "weight": 0.8516179057270591 + }, + { + "text": 
"purpose", + "weight": 0.8514263006873108 + }, + { + "text": "problem", + "weight": 0.8513422460870528 + }, + { + "text": "author", + "weight": 0.8510317626555526 + }, + { + "text": "published", + "weight": 0.8509244144956545 + }, + { + "text": "goal", + "weight": 0.8506191378690882 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "Who was that chess grandmaster we talked about?" + } + ], + "results": [ + { + "messageMatches": [ + 15, + 48 + ], + "entityMatches": [ + 641, + 1081 + ] + } + ], + "cmd": "@kpSearch --query \"Who was that chess grandmaster we talked about?\"" + }, + { + "searchText": "What did Kevin say about Children of Clocks and Kids of Ruin?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What did Kevin say about Children of Clocks and Kids of Ruin?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "say" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "targetEntities": [ + { + "name": "Children of Clocks", + "isNamePronoun": false, + "type": [ + "book" + ] + }, + { + "name": "Kids of Ruin", + "isNamePronoun": false, + "type": [ + "book" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + 
{ + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.8607185122587705 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + 
"weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8709546703086021 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.861640107976141 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "children of clocks", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "children of time", + "weight": 0.9246417034789157 + }, + { + "text": "children of memory", + "weight": 0.8887280268920221 + }, + { + "text": "children of time characters", + "weight": 0.876863301295453 + }, + { + "text": "children", + "weight": 0.8561049990435207 + }, + { + "text": "children of ruin", + "weight": 10 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "books", + "weight": 10 + }, + { + "text": "another book", + "weight": 0.8953175048890671 + }, + { + "text": "fiction", + "weight": 0.8919534699311819 + }, + { + "text": "paper", + "weight": 0.8872714584895001 + }, + { + "text": "book series", + "weight": 0.8872089573927062 + }, + { + "text": "movie", + "weight": 0.8820902843985773 + }, + { + "text": "book release", + "weight": 0.8786848604414796 + }, + { + "text": "novel", + "weight": 0.8664136885729098 + }, + { + "text": "next book", + "weight": 0.8634568736402546 + }, + { + "text": "published", + "weight": 0.8625212802008864 + }, + { + "text": "novels", + "weight": 0.8616026857534473 + }, + { + "text": "story", + "weight": 0.8613459701612218 + }, + { + "text": "reading", + "weight": 0.8602563203209073 + }, + { + "text": "job", + "weight": 0.8599676519780717 + }, + { + "text": "series", + "weight": 0.8597752080742203 + }, + { + "text": "subsequent books", + "weight": 0.8586964922385396 + }, + { 
+ "text": "movies", + "weight": 0.857360455316826 + }, + { + "text": "school", + "weight": 0.8563492536817514 + }, + { + "text": "documents", + "weight": 0.8560191483029079 + }, + { + "text": "literature", + "weight": 0.8549181392434507 + }, + { + "text": "his most recent book", + "weight": 0.8543240519542384 + }, + { + "text": "game", + "weight": 0.8539545088748834 + }, + { + "text": "work", + "weight": 0.8528324064557533 + }, + { + "text": "stories", + "weight": 0.851789983525879 + }, + { + "text": "writing", + "weight": 0.8509183311716293 + }, + { + "text": "authors", + "weight": 0.8508783628156902 + }, + { + "text": "study", + "weight": 0.8505012638554674 + } + ] + }, + { + "term": { + "text": "kids of ruin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "children of ruin entity", + "weight": 0.9279065941200743 + } + ] + }, + { + "term": { + "text": "book", + "weight": 10 + }, + "relatedTerms": [] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + 
}, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "children of clocks", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children of time", + "weight": 0.9246417034789157 + }, + { + "text": "children of memory", + "weight": 0.8887280268920221 + }, + { + "text": "children of time characters", + "weight": 0.876863301295453 + }, + { + "text": "children", + "weight": 0.8561049990435207 + }, + { + "text": "children of ruin", + "weight": 0.854270109875692 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "children of clocks", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children of time", + "weight": 0.9246417034789157 + }, + { + "text": "children of memory", + "weight": 0.8887280268920221 + }, + { + "text": "children of time characters", + "weight": 0.876863301295453 + }, + { + "text": "children", + "weight": 0.8561049990435207 + }, + { + "text": "children of ruin", + "weight": 0.854270109875692 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "children of clocks", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "children of time", + "weight": 0.9246417034789157 + }, + { + "text": "children of memory", + "weight": 0.8887280268920221 + }, + { + "text": "children of time characters", + "weight": 0.876863301295453 + }, + { + "text": "children", + "weight": 0.8561049990435207 + }, + { + "text": "children of ruin", + "weight": 0.854270109875692 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + }, + { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": 
"kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "kids of ruin", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children of ruin", + "weight": 10 + }, + { + "text": "children of ruin entity", + "weight": 0.9279065941200743 + }, + { + "text": "children of time", + "weight": 0.8713821456309764 + }, + { + "text": "children of memory", + "weight": 0.8588231010105535 + }, + { + "text": "children", + "weight": 0.8549683523572993 + }, + { + "text": "kid", + "weight": 0.8526699489028028 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "kids of ruin", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "children of ruin", + "weight": 10 + }, + { + "text": "children of ruin entity", + "weight": 0.9279065941200743 + }, + { + "text": "children of time", + "weight": 0.8713821456309764 + }, + { + "text": "children of memory", + "weight": 0.8588231010105535 + }, + { + "text": 
"children", + "weight": 0.8549683523572993 + }, + { + "text": "kid", + "weight": 0.8526699489028028 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "kids of ruin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "children of ruin", + "weight": 10 + }, + { + "text": "children of ruin entity", + "weight": 0.9279065941200743 + }, + { + "text": "children of time", + "weight": 0.8713821456309764 + }, + { + "text": "children of memory", + "weight": 0.8588231010105535 + }, + { + "text": "children", + "weight": 0.8549683523572993 + }, + { + "text": "kid", + "weight": 0.8526699489028028 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + ] + } + } + } + ], + "rawQuery": "What did Kevin say about Children of Clocks and Kids of Ruin?" + } + ], + "results": [ + { + "messageMatches": [ + 12, + 30, + 34, + 36, + 62 + ], + "entityMatches": [ + 55, + 145, + 165, + 175, + 305, + 1199, + 614, + 852, + 882, + 919, + 1198, + 56, + 57, + 146, + 147, + 166, + 167, + 176, + 177, + 306, + 307, + 610, + 887, + 920, + 612, + 613, + 615, + 616, + 883, + 886, + 885, + 1200, + 884, + 611 + ], + "topicMatches": [ + 1209, + 857, + 931, + 626, + 897, + 1208, + 898, + 625 + ], + "actionMatches": [ + 58, + 59, + 148, + 149, + 168, + 169, + 178, + 179, + 308, + 309, + 929, + 890, + 893, + 888, + 891, + 889, + 892, + 1201, + 1204, + 1202, + 1205 + ] + } + ], + "cmd": "@kpSearch --query \"What did Kevin say about Children of Clocks and Kids of Ruin?\"" + }, + { + "searchText": "Give me a list of animals", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "list of animals", + "filters": [ + { + "entitySearchTerms": [ + { + "name": "*", + "isNamePronoun": false, + "type": [ + "animal" + ] + } + ], + "searchTerms": [] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "type", + 
"propertyValue": { + "term": { + "text": "animal", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "animal", + "weight": 10 + }, + { + "text": "animal behavior", + "weight": 0.8853139395001849 + }, + { + "text": "human", + "weight": 0.8761103581434334 + }, + { + "text": "species", + "weight": 0.8728613602342458 + }, + { + "text": "anthropomorphic animals", + "weight": 0.870640551417944 + }, + { + "text": "alien", + "weight": 0.8643427139969423 + }, + { + "text": "flies", + "weight": 0.8573647386380743 + }, + { + "text": "robot", + "weight": 0.8562761334141211 + }, + { + "text": "artifact", + "weight": 0.855085062113182 + }, + { + "text": "organ", + "weight": 0.8548290946071508 + }, + { + "text": "vehicle", + "weight": 0.854459220485194 + }, + { + "text": "non-human", + "weight": 0.8511867841387388 + }, + { + "text": "zoology", + "weight": 0.8507391772978699 + }, + { + "text": "nature", + "weight": 0.8501104053451894 + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + ], + "rawQuery": "list of animals" + } + ], + "results": [ + { + "messageMatches": [ + 21, + 33, + 35, + 55, + 73, + 83, + 31, + 36, + 67, + 93, + 41, + 51, + 39, + 40 + ], + "entityMatches": [ + 709, + 710, + 863, + 864, + 899, + 900, + 1129, + 1130, + 1316, + 1317, + 1318, + 1408, + 1409, + 1410, + 859, + 860, + 921, + 1291, + 1481, + 1005, + 1106, + 968, + 984 + ] + } + ], + "cmd": "@kpSearch --query \"Give me a list of animals\"" + }, + { + "searchText": "What were we saying about Starship troopers?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What were we saying about Starship troopers?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "saying" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "we", + "isNamePronoun": true + } + ], + "targetEntities": [ + { + "name": "Starship troopers", + "isNamePronoun": false, + "type": [ + "movie" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + 
"compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "saying", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 0.8965306655753185 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "starship troopers", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "starship troopers bugs", + "weight": 0.9343646771217653 + }, + { + "text": "star trek", + "weight": 0.8965248999343911 + }, + { + "text": "sci-fi novels", + "weight": 0.8618417566446194 + }, + { + "text": "sci-fi", + "weight": 0.8599100501787743 + }, + { + "text": "science fiction", + "weight": 0.8528870868470667 + }, + { + "text": "dystopian take in sci-fi", + "weight": 0.8505182629469817 + } + ] + }, + { + "term": { + "text": "movie", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "movies", + "weight": 10 + }, + { + "text": "episode", + "weight": 0.8963339782245872 + }, + { + "text": "fiction", + "weight": 0.8950920252826218 + }, + { + "text": "game", + "weight": 0.8874445438030081 + }, + { + "text": "actors", + "weight": 0.8860106632957048 + }, + { + "text": "book", + "weight": 0.8820902843985773 + }, + { + "text": "stories", + "weight": 0.87584453641261 + }, + { + "text": "story", + "weight": 0.8735464155245114 + }, + { + "text": "writer", + "weight": 0.8733248049247987 + }, + { + "text": "watch", + "weight": 0.8726356920361626 + }, + { + "text": "series", + "weight": 0.872197969873526 + }, + { + "text": "picture", + "weight": 0.869302853880071 + }, + { + "text": "performance", + "weight": 0.869120731073309 + }, + { + "text": "books", + "weight": 0.8663417625018435 + }, + { + "text": "music", + "weight": 0.8656983424097792 + }, + { + "text": "job", + "weight": 0.8640722698464682 + }, + { + "text": "genre", + "weight": 0.8617874159731083 + }, + { + "text": "show", + "weight": 0.8614962890272202 + }, + { + "text": "topic", + 
"weight": 0.8604859705769485 + }, + { + "text": "article", + "weight": 0.8604675436012976 + }, + { + "text": "playing", + "weight": 0.8584992674486526 + }, + { + "text": "project", + "weight": 0.8577634602326532 + }, + { + "text": "headline", + "weight": 0.8571046138334414 + }, + { + "text": "venture", + "weight": 0.8569466293500919 + }, + { + "text": "character", + "weight": 0.8564648013190839 + }, + { + "text": "writers", + "weight": 0.8559054574599632 + }, + { + "text": "act", + "weight": 0.8541954594033725 + }, + { + "text": "play", + "weight": 0.8540473424047181 + }, + { + "text": "title", + "weight": 0.8521043786323057 + }, + { + "text": "action", + "weight": 0.8516147580884164 + }, + { + "text": "voice", + "weight": 0.85124096320751 + }, + { + "text": "paper", + "weight": 0.8509334797970916 + }, + { + "text": "work", + "weight": 0.8508204202300914 + }, + { + "text": "youtube", + "weight": 0.8506460674832327 + }, + { + "text": "school", + "weight": 0.8504256546445399 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "saying", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 0.8965306655753185 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "starship troopers", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "starship troopers bugs", + "weight": 0.9343646771217653 + }, + { + "text": "star trek", + "weight": 0.8965248999343911 + }, + { + "text": "sci-fi novels", + "weight": 0.8618417566446194 + }, + { + "text": "sci-fi", + "weight": 0.8599100501787743 + }, + { + "text": "science fiction", + "weight": 0.8528870868470667 + }, + { + "text": "dystopian take in sci-fi", + "weight": 0.8505182629469817 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": 
{ + "text": "starship troopers", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "starship troopers bugs", + "weight": 0.9343646771217653 + }, + { + "text": "star trek", + "weight": 0.8965248999343911 + }, + { + "text": "sci-fi novels", + "weight": 0.8618417566446194 + }, + { + "text": "sci-fi", + "weight": 0.8599100501787743 + }, + { + "text": "science fiction", + "weight": 0.8528870868470667 + }, + { + "text": "dystopian take in sci-fi", + "weight": 0.8505182629469817 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "starship troopers", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "starship troopers bugs", + "weight": 0.9343646771217653 + }, + { + "text": "star trek", + "weight": 0.8965248999343911 + }, + { + "text": "sci-fi novels", + "weight": 0.8618417566446194 + }, + { + "text": "sci-fi", + "weight": 0.8599100501787743 + }, + { + "text": "science fiction", + "weight": 0.8528870868470667 + }, + { + "text": "dystopian take in sci-fi", + "weight": 0.8505182629469817 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What were we saying about Starship troopers?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 91, + 19, + 28, + 76, + 86, + 93, + 35, + 66, + 5, + 6, + 12, + 63, + 74, + 88, + 96, + 78, + 67, + 84 + ], + "entityMatches": [ + 1466, + 689, + 817, + 1355, + 1439, + 1483, + 900, + 687, + 1262, + 568, + 569, + 577, + 612, + 613, + 614, + 615, + 616, + 1213, + 1331, + 1464, + 1468, + 1434, + 576, + 1450, + 688, + 1217, + 611, + 1465, + 567, + 1517, + 575, + 1374, + 1463, + 818 + ], + "topicMatches": [ + 1476, + 573, + 572, + 1455, + 586, + 620, + 1240, + 1284, + 1300, + 1340, + 1382, + 1431, + 1447, + 1471, + 1497, + 1525 + ], + "actionMatches": [ + 581, + 582, + 583, + 1487, + 904, + 905, + 909, + 910, + 691, + 1451, + 1452, + 690, + 692, + 1227, + 1234, + 824, + 828 + ] + } + ], + "cmd": "@kpSearch --query \"What were we saying about Starship troopers?\"" + }, + { + "searchText": "Who studied zoology and psychology", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Who studied zoology and psychology", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "studied" + ], + "tense": "Past" + }, + "actorEntities": "*", + "targetEntities": [ + { + "name": "zoology", + "isNamePronoun": false, + "type": [ + "subject" + ] + }, + { + "name": "psychology", + "isNamePronoun": false, + "type": [ + "subject" + ] + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "studied", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "studied", + "weight": 10 + }, + { + "text": "study", + "weight": 0.9103561389052532 + }, + { + "text": "be studied by", + "weight": 0.9056059514319782 + }, + { + "text": "research", + "weight": 0.8649162077240531 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "zoology", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "zoology 
and psychology", + "weight": 0.9390952921391027 + }, + { + "text": "animal behavior", + "weight": 0.8849844577742009 + }, + { + "text": "animal world, specifically insects", + "weight": 0.8509550815501485 + }, + { + "text": "animal", + "weight": 0.8507391772978699 + } + ] + }, + { + "term": { + "text": "subject", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "topic", + "weight": 0.9244286516272775 + }, + { + "text": "topics", + "weight": 0.9054510147213362 + }, + { + "text": "objective", + "weight": 0.8985855497865116 + }, + { + "text": "title", + "weight": 0.8883948994206865 + }, + { + "text": "person", + "weight": 0.8866757679593456 + }, + { + "text": "article", + "weight": 0.8856070631171192 + }, + { + "text": "issue", + "weight": 0.8818125600028686 + }, + { + "text": "entity", + "weight": 0.8792712933989035 + }, + { + "text": "object", + "weight": 0.878688675406269 + }, + { + "text": "individual", + "weight": 0.8771605869122577 + }, + { + "text": "concept", + "weight": 0.8761655711979275 + }, + { + "text": "task", + "weight": 0.8735224602290171 + }, + { + "text": "headline", + "weight": 0.8725343897474616 + }, + { + "text": "context", + "weight": 0.871408467069043 + }, + { + "text": "thing", + "weight": 0.8702710611552207 + }, + { + "text": "stuff", + "weight": 0.8687386340861302 + }, + { + "text": "content", + "weight": 0.868301163075979 + }, + { + "text": "purpose", + "weight": 0.8681073719767934 + }, + { + "text": "articles", + "weight": 0.8672715936352198 + }, + { + "text": "face", + "weight": 0.8672699223903719 + }, + { + "text": "condition", + "weight": 0.8661797268449712 + }, + { + "text": "author", + "weight": 0.8642068647464447 + }, + { + "text": "source", + "weight": 0.8637434646302785 + }, + { + "text": "issues", + "weight": 0.8630328685034142 + }, + { + "text": "submit", + "weight": 0.8624622391854637 + }, + { + "text": "sites", + "weight": 0.8612462324668475 + }, + { + "text": "character", + "weight": 0.8604913620384654 + }, + { + "text": 
"interest", + "weight": 0.8601106673598514 + }, + { + "text": "host", + "weight": 0.858436185916323 + }, + { + "text": "child", + "weight": 0.8582048320572854 + }, + { + "text": "emotion", + "weight": 0.8574483470684723 + }, + { + "text": "creator", + "weight": 0.857297779248452 + }, + { + "text": "species", + "weight": 0.8567815473990814 + }, + { + "text": "question", + "weight": 0.8566164784401654 + }, + { + "text": "idea", + "weight": 0.8565091358975251 + }, + { + "text": "impact", + "weight": 0.856198738158886 + }, + { + "text": "sense", + "weight": 0.8559022996694529 + }, + { + "text": "field", + "weight": 0.8551925382237141 + }, + { + "text": "method", + "weight": 0.8550748242115968 + }, + { + "text": "comment", + "weight": 0.8540681693409468 + }, + { + "text": "project", + "weight": 0.8538818162326582 + }, + { + "text": "story", + "weight": 0.8538698890874821 + }, + { + "text": "artifact", + "weight": 0.853135843873496 + }, + { + "text": "genre", + "weight": 0.8530703738503651 + }, + { + "text": "variable", + "weight": 0.8530573583212676 + }, + { + "text": "present", + "weight": 0.852915572691196 + }, + { + "text": "attention", + "weight": 0.8524762700732581 + }, + { + "text": "origin", + "weight": 0.8523851494605873 + } + ] + }, + { + "term": { + "text": "psychology", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "personality", + "weight": 0.8710771421241397 + }, + { + "text": "human behavior", + "weight": 0.8673234284190323 + }, + { + "text": "literature", + "weight": 0.8510332552612891 + } + ] + }, + { + "term": { + "text": "subject", + "weight": 10 + }, + "relatedTerms": [] + } + ] + } + } + ], + "rawQuery": "Who studied zoology and psychology" + } + ], + "results": [ + { + "messageMatches": [ + 21, + 20, + 12, + 83, + 39, + 40, + 33, + 16, + 22, + 29, + 35, + 10, + 76, + 37, + 14, + 41, + 31, + 87, + 72, + 38, + 42, + 68, + 46, + 52, + 54 + ], + "entityMatches": [ + 707, + 706, + 966, + 819, + 833, + 1484, + 547, + 561, + 0, + 1, + 2, + 5, + 6, 
+ 7, + 10, + 11, + 12, + 15, + 16, + 17, + 20, + 21, + 22, + 25, + 26, + 27, + 30, + 31, + 32, + 35, + 36, + 37, + 40, + 41, + 42, + 45, + 46, + 47, + 50, + 51, + 52, + 55, + 56, + 57, + 60, + 61, + 62, + 65, + 66, + 67, + 70, + 71, + 72, + 75, + 76, + 77, + 80, + 81, + 82, + 85, + 86, + 87, + 90, + 91, + 92, + 95, + 96, + 97, + 100, + 101, + 102, + 105, + 106, + 107, + 110, + 111, + 112, + 115, + 116, + 117, + 120, + 121, + 122, + 125, + 126, + 127, + 130, + 131, + 132, + 135, + 136, + 137, + 140, + 141, + 142, + 145, + 146, + 147, + 150, + 151, + 152, + 155, + 156, + 157, + 160, + 161, + 162, + 165, + 166, + 167, + 170, + 171, + 172, + 175, + 176, + 177, + 180, + 181, + 182, + 185, + 186, + 187, + 190, + 191, + 192, + 195, + 196, + 197, + 200, + 201, + 202, + 205, + 206, + 207, + 210, + 211, + 212, + 215, + 216, + 217, + 220, + 221, + 222, + 225, + 226, + 227, + 230, + 231, + 232, + 235, + 236, + 237, + 240, + 241, + 242, + 245, + 246, + 247, + 250, + 251, + 252, + 255, + 256, + 257, + 260, + 261, + 262, + 265, + 266, + 267, + 270, + 271, + 272, + 275, + 276, + 277, + 280, + 281, + 282, + 285, + 286, + 287, + 290, + 291, + 292, + 295, + 296, + 297, + 300, + 301, + 302, + 305, + 306, + 307, + 310, + 311, + 312, + 315, + 316, + 317, + 320, + 321, + 322, + 325, + 326, + 327, + 330, + 331, + 332, + 335, + 336, + 337, + 340, + 341, + 342, + 345, + 346, + 347, + 350, + 351, + 352, + 355, + 356, + 357, + 360, + 361, + 362, + 365, + 366, + 367, + 370, + 371, + 372, + 375, + 376, + 377, + 380, + 381, + 382, + 385, + 386, + 387, + 390, + 391, + 392, + 395, + 396, + 397, + 400, + 401, + 402, + 405, + 406, + 407, + 410, + 411, + 412, + 415, + 416, + 417, + 420, + 421, + 422, + 425, + 426, + 427, + 430, + 431, + 432, + 435, + 436, + 437, + 440, + 441, + 442, + 445, + 446, + 447, + 450, + 451, + 452, + 455, + 456, + 457, + 460, + 461, + 462, + 465, + 466, + 467, + 470, + 471, + 472, + 475, + 476, + 477, + 480, + 481, + 482, + 485, + 486, + 487, + 490, + 491, + 492, + 495, + 
496, + 497, + 500, + 501, + 502, + 505, + 506, + 507, + 510, + 511, + 512, + 515, + 516, + 517, + 520, + 521, + 522, + 527, + 544, + 566, + 567, + 575, + 591, + 597, + 602, + 629, + 641, + 651, + 663, + 708, + 750, + 770, + 801, + 818, + 887, + 920, + 980, + 981, + 982, + 983, + 985, + 1031, + 1032, + 1047, + 1084, + 1132, + 1249, + 1288, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1385, + 1407, + 1434, + 1435, + 1448, + 1456, + 1463, + 1478, + 1516, + 1518, + 1567, + 1580, + 1049, + 681, + 820, + 834, + 835, + 836, + 837, + 884, + 941, + 1175, + 1177, + 1289, + 1290, + 1292, + 1315, + 1319, + 1480, + 533, + 664, + 665, + 738, + 769, + 791, + 858, + 984, + 986, + 1030, + 1062, + 1063, + 1065, + 1066, + 1081, + 1131, + 1165, + 1166, + 1374, + 1396, + 1146, + 967, + 1163, + 1192, + 1218, + 1261, + 1263, + 1265, + 1438, + 1517, + 1519, + 1107, + 1029, + 1033, + 1545, + 1546, + 1147, + 1061, + 838, + 1216, + 1219, + 1264, + 1346, + 1547, + 885, + 886, + 1200, + 1465, + 1466, + 1504, + 859, + 860, + 921, + 1291, + 1481, + 1064, + 1197, + 852, + 1565, + 1566, + 688, + 689, + 817, + 1217, + 1355, + 1439, + 1483, + 1507, + 652, + 653, + 1345, + 1450, + 709, + 710, + 863, + 864, + 899, + 900, + 1129, + 1130, + 1316, + 1317, + 1318, + 1408, + 1409, + 1410, + 610, + 1083, + 1199, + 1106, + 1082, + 1467, + 1004, + 1078, + 576, + 1464, + 1505, + 1506, + 968, + 640 + ], + "topicMatches": [ + 622, + 733, + 623, + 732, + 731, + 1327, + 1157, + 1243, + 1191, + 660, + 1348 + ], + "actionMatches": [ + 716, + 724, + 717, + 725, + 715, + 723, + 714, + 722, + 1007, + 1010, + 549, + 554, + 1267, + 1273, + 903, + 908, + 929, + 1009, + 1012, + 1087, + 1269, + 1275, + 992, + 993, + 1068, + 1070, + 1294, + 1295, + 1309, + 825, + 829, + 1090, + 1096, + 1150, + 1152, + 1587, + 1592, + 1067, + 1069, + 1270, + 642, + 643, + 644, + 645, + 822, + 824, + 826, + 828, + 1225, + 1232, + 536, + 753, + 785, + 1133, + 1135, + 1136, + 1138, + 668, + 673, + 1548, + 1552, + 548, + 1227, + 1234, + 
1509, + 633, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 971, + 974, + 1451, + 865, + 871, + 869, + 870, + 875, + 876, + 1050, + 927, + 581, + 582, + 583, + 888, + 904, + 758, + 760 + ] + } + ], + "cmd": "@kpSearch --query \"Who studied zoology and psychology\"" + }, + { + "searchText": "Summarize Kevin's thoughts on artificial intelligence", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Summarize Kevin's thoughts on artificial intelligence", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "isInformational": false + }, + "searchTerms": [ + "artificial intelligence" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 
0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "artificial intelligence", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "human-seeming intelligence", + "weight": 0.8864734409947517 + }, + { + "text": "artificial", + "weight": 0.880934040286398 + }, + { + "text": "strong ai", + "weight": 0.8793549419006241 + }, + { + "text": "generative ai", + "weight": 0.8768135499015218 + }, + { + "text": "generative ai and creativity", + "weight": 0.8763137497687796 + }, + { + "text": "emergent ai", + "weight": 0.8727685866397861 + }, + { + "text": "topic: superhuman ai", + "weight": 0.8724873582946049 + }, + { + "text": "science fiction ai", + "weight": 0.8721353651769909 + }, + { + "text": "ai super intelligence", + "weight": 0.8712886210805108 + }, + { + "text": "artificially generated", + "weight": 0.8700178090671777 + }, + { + "text": "chatbots", + "weight": 0.8685584253229648 + }, + { + "text": "robot behavior", + "weight": 0.8682360373003561 + }, + { + "text": 
"superhuman ai", + "weight": 0.8672326833864481 + }, + { + "text": "intelligent life", + "weight": 0.8665644533589565 + }, + { + "text": "chatbot", + "weight": 0.8657365939584234 + }, + { + "text": "generative ai systems", + "weight": 0.8626742573760334 + }, + { + "text": "ai and human interaction", + "weight": 0.862162478298934 + }, + { + "text": "intelligence", + "weight": 0.8613737945944341 + }, + { + "text": "written by ai", + "weight": 0.8584006982933475 + }, + { + "text": "artificial events", + "weight": 0.8573337764997134 + }, + { + "text": "self-awareness in chatbots", + "weight": 0.8543346080058085 + }, + { + "text": "skynet", + "weight": 0.8542080117789144 + }, + { + "text": "robot butlers", + "weight": 0.8502714496515634 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + } + ], + "rawQuery": "Summarize Kevin's thoughts on artificial intelligence" + } + ], + "results": [ + { + "messageMatches": [ + 91, + 60, + 40, + 66, + 97, + 93, + 36, + 48, + 28, + 99, + 74, + 2, + 62, + 44, + 4, + 84, + 76, + 95, + 87, + 72, + 38, + 30, + 70, + 68, + 46 + ], + "entityMatches": [ + 5, + 15, + 25, + 35, + 45, + 55, + 65, + 75, + 85, + 95, + 105, + 115, + 125, + 135, + 145, + 155, + 165, + 175, + 185, + 195, + 205, + 215, + 225, + 235, + 245, + 255, + 265, + 275, + 285, + 295, + 305, + 315, + 325, + 335, + 345, + 355, + 365, + 375, + 385, + 395, + 405, + 415, + 430, + 440, + 450, + 460, + 470, + 480, + 490, + 500, + 510, + 520, + 544, + 566, + 982, + 6, + 7, + 16, + 17, + 26, + 27, + 36, + 37, + 46, + 47, + 56, + 57, + 66, + 67, + 76, + 77, + 86, + 87, + 96, + 97, + 106, + 107, + 116, + 117, + 126, + 127, + 136, + 137, + 146, + 147, + 156, + 157, + 166, + 167, + 176, + 177, + 186, + 187, + 196, + 197, + 206, + 207, + 
216, + 217, + 226, + 227, + 236, + 237, + 246, + 247, + 256, + 257, + 266, + 267, + 276, + 277, + 286, + 287, + 296, + 297, + 306, + 307, + 316, + 317, + 326, + 327, + 336, + 337, + 346, + 347, + 356, + 357, + 366, + 367, + 376, + 377, + 386, + 387, + 396, + 397, + 406, + 407, + 416, + 417, + 431, + 432, + 441, + 442, + 451, + 452, + 461, + 462, + 471, + 472, + 481, + 482, + 491, + 492, + 501, + 502, + 511, + 512, + 521, + 522, + 575, + 597, + 610, + 629, + 651, + 770, + 818, + 887, + 920, + 980, + 981, + 983, + 985, + 1031, + 1032, + 1084, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1456, + 1463, + 1478, + 1146, + 1335, + 1029, + 1033, + 1545, + 1546, + 886, + 821, + 1264, + 1547, + 1334, + 885, + 1200, + 1465, + 1466, + 681, + 819, + 820, + 884, + 1175, + 1177, + 1480, + 1245, + 545, + 923, + 984, + 576, + 1464, + 1199, + 921, + 1174 + ], + "topicMatches": [ + 934, + 897, + 832, + 1339, + 937, + 1001, + 898 + ], + "actionMatches": [ + 929, + 8, + 9, + 18, + 19, + 28, + 29, + 38, + 39, + 48, + 49, + 58, + 59, + 68, + 69, + 78, + 79, + 88, + 89, + 98, + 99, + 108, + 109, + 118, + 119, + 128, + 129, + 138, + 139, + 148, + 149, + 158, + 159, + 168, + 169, + 178, + 179, + 188, + 189, + 198, + 199, + 208, + 209, + 218, + 219, + 228, + 229, + 238, + 239, + 248, + 249, + 258, + 259, + 268, + 269, + 278, + 279, + 288, + 289, + 298, + 299, + 308, + 309, + 318, + 319, + 328, + 329, + 338, + 339, + 348, + 349, + 358, + 359, + 368, + 369, + 378, + 379, + 388, + 389, + 398, + 399, + 408, + 409, + 418, + 419, + 433, + 434, + 443, + 444, + 453, + 454, + 463, + 464, + 473, + 474, + 483, + 484, + 493, + 494, + 503, + 504, + 513, + 514, + 523, + 524, + 550, + 553, + 555, + 549, + 554, + 548, + 772, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1548, + 1552, + 925, + 889, + 892, + 1096, + 825, + 829, + 822, + 824, + 826, + 828, + 1338, + 1336, + 1337, + 992, + 993, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 654, + 655, + 656, + 657, + 682, + 683, + 739, + 740, 
+ 741, + 742, + 743, + 744, + 1201, + 1204, + 1488, + 888, + 1202, + 1205, + 1268, + 1274, + 1087, + 1269, + 1275, + 890, + 893, + 823, + 827, + 1178, + 1182, + 1485 + ] + } + ], + "cmd": "@kpSearch --query \"Summarize Kevin's thoughts on artificial intelligence\"" + }, + { + "searchText": "Summarize Kevin's thoughts on artificial intelligence", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "Summarize Kevin's thoughts on artificial intelligence", + "filters": [ + { + "actionSearchTerm": { + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false, + "type": [ + "person" + ] + } + ], + "isInformational": false + }, + "searchTerms": [ + "artificial intelligence" + ] + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "person", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "individual", + "weight": 0.9398325247985158 + }, + { + "text": "people", + "weight": 0.9076868595287156 + }, + { + "text": "someone", + "weight": 0.9003335462322876 + }, + { + "text": "subject", + "weight": 0.8866757679593456 + }, + { + "text": "thing", + "weight": 0.8844454967743176 + }, + { + "text": "human being", + "weight": 0.8799390066662353 + }, + { + "text": "creator", + "weight": 0.87429706693071 + }, + { + "text": "author", + "weight": 0.8734207447711351 + }, + { + "text": "stuff", + "weight": 0.8723859154288631 + }, + { + "text": "character", + "weight": 0.8707147559172393 + }, + { + "text": "entity", + "weight": 0.870169815053667 + }, + { + "text": "personality", + "weight": 0.8689387346969449 + }, + { + "text": "user", + "weight": 0.8685874657470585 + }, + { + "text": "kid", + "weight": 0.8683899369981679 + }, + { + "text": "writer", + "weight": 0.8675118148011728 + }, + { + "text": "everyone", + "weight": 
0.8664553702825754 + }, + { + "text": "child", + "weight": 0.865335732865511 + }, + { + "text": "company", + "weight": 0.8640522043886203 + }, + { + "text": "you", + "weight": 0.8614744902411489 + }, + { + "text": "article", + "weight": 0.858995594261879 + }, + { + "text": "present", + "weight": 0.8584083757177707 + }, + { + "text": "face", + "weight": 0.857866161810929 + }, + { + "text": "me", + "weight": 0.8573868625112572 + }, + { + "text": "human", + "weight": 0.8571245516076945 + }, + { + "text": "things", + "weight": 0.8548638568637724 + }, + { + "text": "issue", + "weight": 0.8542766882914602 + }, + { + "text": "publisher", + "weight": 0.8533999336154265 + }, + { + "text": "vehicle", + "weight": 0.8527661919273922 + }, + { + "text": "agent", + "weight": 0.8512426121549901 + }, + { + "text": "reader", + "weight": 0.8511660216476135 + }, + { + "text": "he", + "weight": 0.8506912735491127 + }, + { + "text": "humans", + "weight": 0.8505269971912497 + }, + { + "text": "host", + "weight": 0.8503196974783295 + } + ] + }, + { + "term": { + "text": "artificial intelligence", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "human-seeming intelligence", + "weight": 0.8864734409947517 + }, + { + "text": "artificial", + "weight": 0.880934040286398 + }, + { + "text": "strong ai", + "weight": 0.8793549419006241 + }, + { + "text": "generative ai", + "weight": 0.8768135499015218 + }, + { + "text": "generative ai and creativity", + "weight": 0.8763137497687796 + }, + { + "text": "emergent ai", + "weight": 0.8727685866397861 + }, + { + "text": "topic: superhuman ai", + "weight": 0.8724873582946049 + }, + { + "text": "science fiction ai", + "weight": 0.8721353651769909 + }, + { + "text": "ai super intelligence", + "weight": 0.8712886210805108 + }, + { + "text": "artificially generated", + "weight": 0.8700178090671777 + }, + { + "text": "chatbots", + "weight": 0.8685584253229648 + }, + { + "text": "robot behavior", + "weight": 0.8682360373003561 + }, + { + "text": 
"superhuman ai", + "weight": 0.8672326833864481 + }, + { + "text": "intelligent life", + "weight": 0.8665644533589565 + }, + { + "text": "chatbot", + "weight": 0.8657365939584234 + }, + { + "text": "generative ai systems", + "weight": 0.8626742573760334 + }, + { + "text": "ai and human interaction", + "weight": 0.862162478298934 + }, + { + "text": "intelligence", + "weight": 0.8613737945944341 + }, + { + "text": "written by ai", + "weight": 0.8584006982933475 + }, + { + "text": "artificial events", + "weight": 0.8573337764997134 + }, + { + "text": "self-awareness in chatbots", + "weight": 0.8543346080058085 + }, + { + "text": "skynet", + "weight": 0.8542080117789144 + }, + { + "text": "robot butlers", + "weight": 0.8502714496515634 + } + ] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + } + ] + } + } + } + ], + "rawQuery": "Summarize Kevin's thoughts on artificial intelligence" + } + ], + "results": [ + { + "messageMatches": [ + 91, + 60, + 40, + 66, + 97, + 93, + 36, + 48, + 28, + 99, + 74, + 2, + 62, + 44, + 4, + 84, + 76, + 95, + 87, + 72, + 38, + 30, + 70, + 68, + 46 + ], + "entityMatches": [ + 5, + 15, + 25, + 35, + 45, + 55, + 65, + 75, + 85, + 95, + 105, + 115, + 125, + 135, + 145, + 155, + 165, + 175, + 185, + 195, + 205, + 215, + 225, + 235, + 245, + 255, + 265, + 275, + 285, + 295, + 305, + 315, + 325, + 335, + 345, + 355, + 365, + 375, + 385, + 395, + 405, + 415, + 430, + 440, + 450, + 460, + 470, + 480, + 490, + 500, + 510, + 520, + 544, + 566, + 982, + 6, + 7, + 16, + 17, + 26, + 27, + 36, + 37, + 46, + 47, + 56, + 57, + 66, + 67, + 76, + 77, + 86, + 87, + 96, + 97, + 106, + 107, + 116, + 117, + 126, + 127, + 136, + 137, + 146, + 147, + 156, + 157, + 166, + 167, + 176, + 177, + 186, + 187, + 196, + 197, + 206, + 207, + 
216, + 217, + 226, + 227, + 236, + 237, + 246, + 247, + 256, + 257, + 266, + 267, + 276, + 277, + 286, + 287, + 296, + 297, + 306, + 307, + 316, + 317, + 326, + 327, + 336, + 337, + 346, + 347, + 356, + 357, + 366, + 367, + 376, + 377, + 386, + 387, + 396, + 397, + 406, + 407, + 416, + 417, + 431, + 432, + 441, + 442, + 451, + 452, + 461, + 462, + 471, + 472, + 481, + 482, + 491, + 492, + 501, + 502, + 511, + 512, + 521, + 522, + 575, + 597, + 610, + 629, + 651, + 770, + 818, + 887, + 920, + 980, + 981, + 983, + 985, + 1031, + 1032, + 1084, + 1330, + 1332, + 1350, + 1351, + 1352, + 1353, + 1456, + 1463, + 1478, + 1146, + 1335, + 1029, + 1033, + 1545, + 1546, + 886, + 821, + 1264, + 1547, + 1334, + 885, + 1200, + 1465, + 1466, + 681, + 819, + 820, + 884, + 1175, + 1177, + 1480, + 1245, + 545, + 923, + 984, + 576, + 1464, + 1199, + 921, + 1174 + ], + "topicMatches": [ + 934, + 897, + 832, + 1339, + 937, + 1001, + 898 + ], + "actionMatches": [ + 929, + 8, + 9, + 18, + 19, + 28, + 29, + 38, + 39, + 48, + 49, + 58, + 59, + 68, + 69, + 78, + 79, + 88, + 89, + 98, + 99, + 108, + 109, + 118, + 119, + 128, + 129, + 138, + 139, + 148, + 149, + 158, + 159, + 168, + 169, + 178, + 179, + 188, + 189, + 198, + 199, + 208, + 209, + 218, + 219, + 228, + 229, + 238, + 239, + 248, + 249, + 258, + 259, + 268, + 269, + 278, + 279, + 288, + 289, + 298, + 299, + 308, + 309, + 318, + 319, + 328, + 329, + 338, + 339, + 348, + 349, + 358, + 359, + 368, + 369, + 378, + 379, + 388, + 389, + 398, + 399, + 408, + 409, + 418, + 419, + 433, + 434, + 443, + 444, + 453, + 454, + 463, + 464, + 473, + 474, + 483, + 484, + 493, + 494, + 503, + 504, + 513, + 514, + 523, + 524, + 550, + 553, + 555, + 549, + 554, + 548, + 772, + 774, + 775, + 1149, + 1151, + 1376, + 1377, + 1378, + 1548, + 1552, + 925, + 889, + 892, + 1096, + 825, + 829, + 822, + 824, + 826, + 828, + 1338, + 1336, + 1337, + 992, + 993, + 1085, + 1086, + 1089, + 1091, + 1092, + 1095, + 654, + 655, + 656, + 657, + 682, + 683, + 739, + 740, 
+ 741, + 742, + 743, + 744, + 1201, + 1204, + 1488, + 888, + 1202, + 1205, + 1268, + 1274, + 1087, + 1269, + 1275, + 890, + 893, + 823, + 827, + 1178, + 1182, + 1485 + ] + } + ], + "cmd": "@kpSearch --query \"Summarize Kevin's thoughts on artificial intelligence\" --fastStop false" + }, + { + "searchText": "Tell me what Kevin said about 'portids'?", + "searchQueryExpr": { + "searchExpressions": [ + { + "rewrittenQuery": "What did Kevin say about 'portids'?", + "filters": [ + { + "actionSearchTerm": { + "actionVerbs": { + "words": [ + "say" + ], + "tense": "Past" + }, + "actorEntities": [ + { + "name": "Kevin", + "isNamePronoun": false + } + ], + "targetEntities": [ + { + "name": "portids", + "isNamePronoun": false + } + ], + "isInformational": false + } + } + ] + } + ] + }, + "compiledQueryExpr": [ + { + "selectExpressions": [ + { + "searchTermGroup": { + "booleanOp": "or", + "terms": [ + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ] + }, + { + "term": { + "text": "portids", + "weight": 10 + }, + 
"relatedTerms": [] + } + ] + }, + "when": { + "scopeDefiningTerms": { + "booleanOp": "and", + "terms": [ + { + "propertyName": "subject", + "propertyValue": { + "term": { + "text": "kevin", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "kevin scott" + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "verb", + "propertyValue": { + "term": { + "text": "say", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "say", + "weight": 10 + }, + { + "text": "talk", + "weight": 0.8855213424269025 + }, + { + "text": "mention", + "weight": 0.8713759993635978 + }, + { + "text": "ask", + "weight": 0.863672464925787 + }, + { + "text": "think", + "weight": 0.8603732193654625 + }, + { + "text": "quotes", + "weight": 0.8591768794646556 + }, + { + "text": "answer", + "weight": 0.8576717178465181 + }, + { + "text": "sense", + "weight": 0.8574937590192444 + }, + { + "text": "voice", + "weight": 0.8554061758452839 + }, + { + "text": "comment", + "weight": 0.8545279622319091 + }, + { + "text": "pay", + "weight": 0.8542760193310156 + }, + { + "text": "show", + "weight": 0.8530012732076532 + }, + { + "text": "play", + "weight": 0.8503518759329526 + } + ], + "relatedTermsRequired": true + } + }, + { + "booleanOp": "or", + "terms": [ + { + "propertyName": "object", + "propertyValue": { + "term": { + "text": "portids", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "portids", + "weight": 10 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "name", + "propertyValue": { + "term": { + "text": "portids", + "weight": 100 + }, + "relatedTerms": [ + { + "text": "portids", + "weight": 10 + } + ], + "relatedTermsRequired": true + } + }, + { + "propertyName": "topic", + "propertyValue": { + "term": { + "text": "portids", + "weight": 10 + }, + "relatedTerms": [ + { + "text": "portids", + "weight": 10 + } + ], + "relatedTermsRequired": true + } + } + ] + } + ] + } + } + } + ], + "rawQuery": "What did Kevin say about 'portids'?" 
+ } + ], + "results": [ + { + "messageMatches": [ + 30 + ], + "entityMatches": [ + 145, + 851 + ], + "actionMatches": [ + 148, + 149, + 853, + 854 + ] + } + ], + "cmd": "@kpSearch --query \"Tell me what Kevin said about 'portids'?\"" + } +] \ No newline at end of file diff --git a/tools/benchmark_query.py b/tools/benchmark_query.py new file mode 100644 index 00000000..fbebe869 --- /dev/null +++ b/tools/benchmark_query.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python3 +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Benchmark lookup_term_filtered as a standalone script. + +Usage: + uv run python tools/benchmark_query.py +""" + +from __future__ import annotations + +import argparse +from collections.abc import Awaitable, Callable +import hashlib +import os +import shutil +import statistics +import tempfile +import time + +import numpy as np + +from typeagent.aitools.embeddings import ( + CachingEmbeddingModel, + NormalizedEmbedding, + NormalizedEmbeddings, +) +from typeagent.knowpro.convsettings import ConversationSettings +from typeagent.knowpro.interfaces_core import Term +from typeagent.knowpro.query import lookup_term_filtered +from typeagent.storage.sqlite.provider import SqliteStorageProvider +from typeagent.transcripts.transcript import ( + Transcript, + TranscriptMessage, + TranscriptMessageMeta, +) + + +class DeterministicBenchmarkEmbedder: + def __init__(self, embedding_size: int) -> None: + self._embedding_size = embedding_size + + @property + def model_name(self) -> str: + return "benchmark-local" + + async def get_embedding_nocache(self, input: str) -> NormalizedEmbedding: + return _compute_embedding(input, self._embedding_size) + + async def get_embeddings_nocache(self, input: list[str]) -> NormalizedEmbeddings: + if not input: + raise ValueError("Cannot embed an empty list") + return np.stack( + [_compute_embedding(value, self._embedding_size) for value in input] + ).astype(np.float32) + + +def _compute_embedding(text: str, 
embedding_size: int) -> NormalizedEmbedding: + digest = hashlib.sha256(text.encode("utf-8")).digest() + repeats = (embedding_size + len(digest) - 1) // len(digest) + data = (digest * repeats)[:embedding_size] + embedding = np.frombuffer(data, dtype=np.uint8).astype(np.float32) + embedding = embedding - np.float32(127.5) + norm = np.float32(np.linalg.norm(embedding)) + if norm > 0: + embedding = embedding / norm + return embedding.astype(np.float32) + + +def create_benchmark_embedding_model(embedding_size: int) -> CachingEmbeddingModel: + return CachingEmbeddingModel(DeterministicBenchmarkEmbedder(embedding_size)) + + +def create_arg_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + description="Benchmark lookup_term_filtered with a synthetic transcript.", + ) + parser.add_argument( + "--messages", + type=int, + default=200, + help="Number of synthetic messages to index before running the benchmark.", + ) + parser.add_argument( + "--rounds", + type=int, + default=200, + help="Number of timed rounds to run.", + ) + parser.add_argument( + "--warmup-rounds", + type=int, + default=20, + help="Number of untimed warmup rounds to run first.", + ) + parser.add_argument( + "--embedding-size", + type=int, + default=16, + help="Embedding size for the local deterministic benchmark model.", + ) + return parser + + +def make_settings(embedding_size: int) -> ConversationSettings: + settings = ConversationSettings( + model=create_benchmark_embedding_model(embedding_size) + ) + settings.semantic_ref_index_settings.auto_extract_knowledge = False + return settings + + +def synthetic_messages(count: int) -> list[TranscriptMessage]: + return [ + TranscriptMessage( + text_chunks=[f"Message {i} about topic {i % 10}"], + metadata=TranscriptMessageMeta(speaker=f"Speaker{i % 3}"), + tags=[f"tag{i % 5}"], + ) + for i in range(count) + ] + + +async def create_indexed_transcript( + settings: ConversationSettings, + storage: SqliteStorageProvider, + message_count: int, 
+) -> Transcript: + settings.storage_provider = storage + transcript = await Transcript.create(settings, name="benchmark-query") + await transcript.add_messages_with_indexing(synthetic_messages(message_count)) + return transcript + + +async def find_best_term(transcript: Transcript) -> tuple[str, int]: + semref_index = transcript.semantic_ref_index + assert semref_index is not None + + best_term: str | None = None + best_count = 0 + + for term in await semref_index.get_terms(): + refs = await semref_index.lookup_term(term) + ref_count = len(refs) if refs is not None else 0 + if ref_count > best_count: + best_count = ref_count + best_term = term + + if best_term is None: + raise ValueError("No terms found after indexing") + + return best_term, best_count + + +async def run_benchmark( + target: Callable[[], Awaitable[None]], + rounds: int, + warmup_rounds: int, +) -> list[float]: + for _ in range(warmup_rounds): + await target() + + samples_us: list[float] = [] + for _ in range(rounds): + start = time.perf_counter_ns() + await target() + elapsed_us = (time.perf_counter_ns() - start) / 1_000 + samples_us.append(elapsed_us) + return samples_us + + +def print_report( + label: str, samples_us: list[float], rounds: int, warmup_rounds: int +) -> None: + print(label) + print(f" rounds: {rounds} ({warmup_rounds} warmup)") + print(f" min: {min(samples_us):9.3f} us") + print(f" mean: {statistics.fmean(samples_us):9.3f} us") + print(f" median: {statistics.median(samples_us):9.3f} us") + print(f" max: {max(samples_us):9.3f} us") + + +async def main() -> None: + args = create_arg_parser().parse_args() + temp_dir = tempfile.mkdtemp(prefix="benchmark-query-") + db_path = os.path.join(temp_dir, "query_bench.db") + + settings = make_settings(args.embedding_size) + storage = SqliteStorageProvider( + db_path, + message_type=TranscriptMessage, + message_text_index_settings=settings.message_text_index_settings, + related_term_index_settings=settings.related_term_index_settings, + ) + + 
try: + transcript = await create_indexed_transcript(settings, storage, args.messages) + best_term, best_count = await find_best_term(transcript) + print(f"Benchmarking term {best_term!r} with {best_count} matches") + + term = Term(text=best_term) + semref_index = transcript.semantic_ref_index + semantic_refs = transcript.semantic_refs + assert semref_index is not None + assert semantic_refs is not None + + async def target() -> None: + results = await lookup_term_filtered( + semref_index, + term, + semantic_refs, + lambda _metadata, _scored: True, + ) + if results is None: + raise ValueError(f"No results found for {best_term!r}") + + samples_us = await run_benchmark(target, args.rounds, args.warmup_rounds) + print_report( + "lookup_term_filtered (accept-all filter)", + samples_us, + args.rounds, + args.warmup_rounds, + ) + finally: + await storage.close() + shutil.rmtree(temp_dir) + + +if __name__ == "__main__": + import asyncio + + asyncio.run(main()) diff --git a/tools/benchmark_vectorbase.py b/tools/benchmark_vectorbase.py new file mode 100644 index 00000000..d14314a7 --- /dev/null +++ b/tools/benchmark_vectorbase.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Benchmark VectorBase lookup methods as a standalone script. 
+ +Usage: + uv run python tools/benchmark_vectorbase.py +""" + +from __future__ import annotations + +import argparse +from collections.abc import Callable +import statistics +import time + +import numpy as np + +from typeagent.aitools.embeddings import NormalizedEmbedding, NormalizedEmbeddings +from typeagent.aitools.vectorbase import ( + ScoredInt, + TextEmbeddingIndexSettings, + VectorBase, +) + + +class NullEmbeddingModel: + @property + def model_name(self) -> str: + return "benchmark-local" + + def add_embedding(self, key: str, embedding: NormalizedEmbedding) -> None: + return None + + async def get_embedding_nocache(self, input: str) -> NormalizedEmbedding: + raise RuntimeError("VectorBase benchmark does not use embedding generation") + + async def get_embeddings_nocache(self, input: list[str]) -> NormalizedEmbeddings: + raise RuntimeError("VectorBase benchmark does not use embedding generation") + + async def get_embedding(self, key: str) -> NormalizedEmbedding: + raise RuntimeError("VectorBase benchmark does not use embedding generation") + + async def get_embeddings(self, keys: list[str]) -> NormalizedEmbeddings: + raise RuntimeError("VectorBase benchmark does not use embedding generation") + + +def create_arg_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + description="Benchmark VectorBase lookup methods with synthetic vectors.", + ) + parser.add_argument( + "--rounds", + type=int, + default=200, + help="Number of timed rounds to run for each benchmark.", + ) + parser.add_argument( + "--warmup-rounds", + type=int, + default=20, + help="Number of untimed warmup rounds to run first.", + ) + parser.add_argument( + "--dim", + type=int, + default=384, + help="Embedding dimension to generate.", + ) + parser.add_argument( + "--subset-size", + type=int, + default=1_000, + help="Subset size for fuzzy_lookup_embedding_in_subset.", + ) + return parser + + +def make_vectorbase( + vector_count: int, dim: int, seed: int +) -> 
tuple[VectorBase, NormalizedEmbedding]: + rng = np.random.default_rng(seed) + vectors = rng.standard_normal((vector_count, dim)).astype(np.float32) + norms = np.linalg.norm(vectors, axis=1, keepdims=True) + vectors /= norms + + settings = TextEmbeddingIndexSettings(embedding_model=NullEmbeddingModel()) + vectorbase = VectorBase(settings) + vectorbase.add_embeddings(None, vectors) + + query = rng.standard_normal(dim).astype(np.float32) + query /= np.linalg.norm(query) + return vectorbase, query + + +def run_benchmark( + target: Callable[[], list[ScoredInt]], rounds: int, warmup_rounds: int +) -> list[float]: + for _ in range(warmup_rounds): + target() + + samples_us: list[float] = [] + for _ in range(rounds): + start = time.perf_counter_ns() + target() + elapsed_us = (time.perf_counter_ns() - start) / 1_000 + samples_us.append(elapsed_us) + return samples_us + + +def validate_result(result: list[ScoredInt]) -> None: + if len(result) != 10: + raise ValueError(f"Expected 10 hits, got {len(result)}") + if not all(isinstance(item, ScoredInt) for item in result): + raise TypeError("Expected every result item to be a ScoredInt") + + +def print_report( + label: str, samples_us: list[float], rounds: int, warmup_rounds: int +) -> None: + print(label) + print(f" rounds: {rounds} ({warmup_rounds} warmup)") + print(f" min: {min(samples_us):9.3f} us") + print(f" mean: {statistics.fmean(samples_us):9.3f} us") + print(f" median: {statistics.median(samples_us):9.3f} us") + print(f" max: {max(samples_us):9.3f} us") + + +def main() -> None: + args = create_arg_parser().parse_args() + + vb_1k, query_1k = make_vectorbase(1_000, args.dim, seed=42) + vb_10k, query_10k = make_vectorbase(10_000, args.dim, seed=43) + subset_rng = np.random.default_rng(99) + subset = subset_rng.choice(10_000, size=args.subset_size, replace=False).tolist() + + benchmarks: list[tuple[str, Callable[[], list[ScoredInt]]]] = [ + ( + "fuzzy_lookup_embedding (1k vectors)", + lambda: 
vb_1k.fuzzy_lookup_embedding(query_1k, max_hits=10, min_score=0.0), + ), + ( + "fuzzy_lookup_embedding (10k vectors)", + lambda: vb_10k.fuzzy_lookup_embedding( + query_10k, max_hits=10, min_score=0.0 + ), + ), + ( + f"fuzzy_lookup_embedding_in_subset ({args.subset_size} of 10k)", + lambda: vb_10k.fuzzy_lookup_embedding_in_subset( + query_10k, + subset, + max_hits=10, + min_score=0.0, + ), + ), + ] + + for label, target in benchmarks: + validate_result(target()) + samples_us = run_benchmark(target, args.rounds, args.warmup_rounds) + print_report(label, samples_us, args.rounds, args.warmup_rounds) + + +if __name__ == "__main__": + main() diff --git a/tools/chat_sessions.py b/tools/chat_sessions.py new file mode 100755 index 00000000..683898c9 --- /dev/null +++ b/tools/chat_sessions.py @@ -0,0 +1,1168 @@ +#!/usr/bin/env python3 + +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Browse VS Code Copilot chat sessions stored on disk. + +Usage: + python tools/chat_sessions.py # list all sessions + python tools/chat_sessions.py -n 5 # list 5 most recent + python tools/chat_sessions.py --all # include empty sessions + python tools/chat_sessions.py # show full conversation + python tools/chat_sessions.py # show session by list index + python tools/chat_sessions.py -s # search messages for text +""" + +import argparse +from collections.abc import Iterator +import contextlib +import datetime +import io +import json +import os +from pathlib import Path +import re +import shlex +import shutil +import subprocess +import sys +from typing import Any + +from colorama import Fore, init, Style + + +def _detect_vscode_user_dir() -> list[Path]: + """Detect VS Code user directories for current environment. + + Returns a list of directories to search, in priority order: + 1. VSCode Server (if .vscode-server exists) + 2. Native VS Code installation for the platform + 3. 
Windows VS Code via WSL mount (if on WSL) + """ + dirs: list[Path] = [] + + # VSCode Server (for remote SSH, WSL, containers, etc.) + vscode_server = Path.home() / ".vscode-server" / "data" / "User" + if vscode_server.is_dir(): + dirs.append(vscode_server) + + # Platform-specific native VS Code + if sys.platform == "linux": + dirs.append(Path.home() / ".config" / "Code" / "User") + elif sys.platform == "win32": + dirs.append(Path.home() / "AppData" / "Roaming" / "Code" / "User") + elif sys.platform == "darwin": + dirs.append(Path.home() / "Library" / "Application Support" / "Code" / "User") + + # Windows via WSL mount (if running on WSL) + if sys.platform == "linux" and Path("/mnt/c").exists(): + win_user = Path("/mnt/c/Users") + if win_user.is_dir(): + # Try to find the current user's home in Windows + for user_dir in win_user.iterdir(): + if user_dir.is_dir(): + vscode_win = user_dir / "AppData" / "Roaming" / "Code" / "User" + if vscode_win.is_dir(): + dirs.append(vscode_win) + break + + return dirs + + +# Color settings +use_color = True + +# Regex to match ANSI escape sequences +ANSI_ESCAPE = re.compile(r"\x1b\[[0-9;]*m") + + +def visible_len(text: str) -> int: + """Return the visible length of text, excluding ANSI escape sequences.""" + return len(ANSI_ESCAPE.sub("", text)) + + +def highlight_query(text: str, query: str) -> str: + """Highlight all occurrences of query in text (case-insensitive).""" + if not use_color: + return text + # Replace all occurrences of query with highlighted version (case-insensitive) + pattern = re.compile(re.escape(query), re.IGNORECASE) + highlighted = pattern.sub( + lambda m: f"{Fore.RED}{m.group()}{Style.RESET_ALL}", + text, + ) + return highlighted + + +def clip_to_visible_length(text: str, target_length: int) -> str: + """Clip text to a target visible length, accounting for ANSI escape codes. 
+ + Splits the string into ANSI-code tokens and plain-character tokens, then + reconstructs from the left until the visible character count reaches the + target. This avoids splitting in the middle of an escape sequence. + """ + # Tokenize: alternate between non-ANSI runs and ANSI sequences + tokens = ANSI_ESCAPE.split(text) + ansi_codes = ANSI_ESCAPE.findall(text) + + result = [] + visible = 0 + # Interleave: tokens[0], ansi_codes[0], tokens[1], ansi_codes[1], ... + for i, plain in enumerate(tokens): + remaining = target_length - visible + if len(plain) <= remaining: + result.append(plain) + visible += len(plain) + else: + result.append(plain[:remaining]) + visible += remaining + # Consume any pending ANSI codes to reset state, then stop + if i < len(ansi_codes): + result.append(ansi_codes[i]) + break + if i < len(ansi_codes): + result.append(ansi_codes[i]) # ANSI codes don't count as visible + return "".join(result) + + +def should_use_color(args: argparse.Namespace | None = None) -> bool: + """Determine if color should be used based on args and environment.""" + # Check explicit command-line flags first + if args is not None: + if hasattr(args, "color"): + if args.color == "always": + return True + if args.color == "never": + return False + # Check environment variables + if os.environ.get("NO_COLOR"): + return False + if os.environ.get("FORCE_COLOR"): + return True + # Default: use color if output is a TTY + return sys.stdout.isatty() + + +def find_session_dirs() -> list[Path]: + """Find all chat session directories across workspaces and global storage. + + Searches for both old format (chatSessions) and new format (GitHub.copilot-chat). 
+ """ + dirs: list[Path] = [] + search_dirs = _detect_vscode_user_dir() + + for base_dir in search_dirs: + if not base_dir.is_dir(): + continue + + # Per-workspace sessions (old format) + ws_root = base_dir / "workspaceStorage" + if ws_root.is_dir(): + for entry in ws_root.iterdir(): + if not entry.is_dir(): + continue + # Old format: chatSessions + chat_dir = entry / "chatSessions" + if chat_dir.is_dir(): + dirs.append(chat_dir) + # New format: GitHub.copilot-chat + copilot_dir = entry / "GitHub.copilot-chat" + if copilot_dir.is_dir(): + dirs.append(copilot_dir) + + # Global (empty window) sessions (old format) + global_dir = base_dir / "globalStorage" / "emptyWindowChatSessions" + if global_dir.is_dir(): + dirs.append(global_dir) + + return dirs + + +def get_workspace_name(session_dir: Path) -> str: + """Try to resolve a workspace name from workspace.json next to session dir.""" + ws_json = session_dir.parent / "workspace.json" + if ws_json.is_file(): + try: + data = json.loads(ws_json.read_text()) + folder = data.get("folder") + if folder: + # "file:///Users/guido/typeagent-py" -> "typeagent-py" + return folder.rstrip("/").rsplit("/", 1)[-1] + except (json.JSONDecodeError, OSError): + pass + if "emptyWindowChatSessions" in str(session_dir): + return "(no workspace)" + return session_dir.parent.name[:12] + + +type SessionInfo = dict[str, Any] + + +def _splice(target: list[Any], index: int, items: list[Any]) -> None: + """Splice items into target at index, extending if needed.""" + while len(target) < index: + target.append(None) + target[index : index + len(items)] = items + + +_RE_CUSTOM_TITLE_JSONL = re.compile( + r'"customTitle"\s*]\s*,\s*"v"\s*:\s*"((?:[^"\\]|\\.)*)"' +) + + +def parse_jsonl_metadata(path: Path) -> SessionInfo | None: + """Fast metadata extraction from a .jsonl chat session file. 
+ + Reads the first line (kind-0 session metadata snapshot) and a few KB + after it (for customTitle patches and first user message) to avoid + reading multi-MB files fully. + Falls back to full parse if the first line isn't a valid kind-0 record. + """ + size = path.stat().st_size + if size == 0: + return None + + with open(path, "rb") as fh: + first_line_bytes = fh.readline() + line1_end = fh.tell() + # Read a few KB more for customTitle patches (kind-1, lines 2-3) + # and possibly the first user message. + extra = fh.read(min(size - line1_end, 4096)).decode("utf-8", errors="replace") + + first_line = first_line_bytes.decode("utf-8", errors="replace") + if not first_line.strip(): + return None + try: + record = json.loads(first_line) + except json.JSONDecodeError: + return None + + if record.get("kind") != 0: + return parse_jsonl(path) # fall back + + info: SessionInfo = { + "path": str(path), + "session_id": path.stem, + "title": None, + "creation_date": None, + "size": size, + "requests": [], + } + + v = record.get("v", {}) + if isinstance(v, dict): + if ts := v.get("creationDate"): + info["creation_date"] = ts + model_info = ( + v.get("inputState", {}).get("selectedModel", {}).get("metadata", {}) + ) + info["model"] = model_info.get("name", "") + if v.get("customTitle"): + info["title"] = v["customTitle"] + # First user message from initial snapshot + reqs = v.get("requests", []) + if reqs and isinstance(reqs[0], dict): + first_user = reqs[0].get("message", {}).get("text", "") + if first_user: + info["requests"].append({"user": first_user}) + + # Look for customTitle patches in the extra bytes after line 1. + # Kind-1 patches for customTitle are small lines near the start of the file. 
+ if not info.get("title") and extra: + m = _RE_CUSTOM_TITLE_JSONL.search(extra) + if m: + info["title"] = m.group(1).replace("\\n", "\n").replace('\\"', '"') + + # Look for first user message in extra bytes (unlikely to be there since + # "message" is deep in the request patch line, but try anyway). + if not info["requests"] and extra: + m = _RE_FIRST_MSG.search(extra) + if m: + first_user = m.group(1).replace("\\n", "\n").replace('\\"', '"') + info["requests"].append({"user": first_user}) + + # If we still have no requests but the extra bytes contain a kind-2 + # request splice, the file has requests (we just can't extract the text + # from a small buffer). + if not info["requests"] and '"requests"' in extra: + info["requests"].append({"user": ""}) + + return info + + +def parse_jsonl(path: Path) -> SessionInfo | None: + """Parse a .jsonl chat session file. + + The JSONL format is a delta/patch stream: + kind 0: session metadata (creationDate, model, etc.) + kind 1: property update at key-path k + kind 2: array splice — v is the new items, i is the offset in the + array identified by k (e.g. ["requests"] or + ["requests", 0, "response"]) + We reconstruct the final session state by replaying all patches. + """ + lines = path.read_text(errors="replace").strip().splitlines() + if not lines: + return None + + info: SessionInfo = { + "path": str(path), + "session_id": path.stem, + "title": None, + "creation_date": None, + "size": path.stat().st_size, + "requests": [], + } + + # Accumulate raw request dicts; patches are applied in order. 
+ raw_requests: list[dict[str, Any]] = [] + + for line in lines: + try: + record = json.loads(line) + except json.JSONDecodeError: + continue + + kind = record.get("kind") + k: list[Any] = record.get("k", []) + v = record.get("v") + i: int | None = record.get("i") + + if kind == 0 and isinstance(v, dict): + # Session metadata + ts = v.get("creationDate") + if ts: + info["creation_date"] = ts + model_info = ( + v.get("inputState", {}).get("selectedModel", {}).get("metadata", {}) + ) + info["model"] = model_info.get("name", "") + if v.get("customTitle"): + info["title"] = v["customTitle"] + # Initial snapshot may include requests already. + for req in v.get("requests", []): + if isinstance(req, dict): + raw_requests.append(req) + + elif kind == 1: + # Scalar property update at key-path k + if "customTitle" in k: + info["title"] = v + elif ( + len(k) >= 3 + and k[0] == "requests" + and isinstance(k[1], int) + and k[1] < len(raw_requests) + ): + # e.g. k: ["requests", 0, "modelState"] + raw_requests[k[1]][k[2]] = v + + elif kind == 2: + items = v if isinstance(v, list) else [] + if k == ["requests"]: + # Full request objects + if i is not None: + _splice(raw_requests, i, items) + else: + raw_requests.extend(items) + elif ( + len(k) >= 3 + and k[0] == "requests" + and isinstance(k[1], int) + and k[1] < len(raw_requests) + ): + # Patch a sub-array, e.g. k: ["requests", 0, "response"] + req_idx = k[1] + prop = k[2] + arr = raw_requests[req_idx].get(prop) + if not isinstance(arr, list): + arr = [] + if i is not None: + _splice(arr, i, items) + else: + arr.extend(items) + raw_requests[req_idx][prop] = arr + + # Parse the final reconstructed state of each request. + for req in raw_requests: + parsed = _parse_request(req) + if parsed: + info["requests"].append(parsed) + + return info + + +# Regexes for fast tail-of-file metadata extraction from JSON files. 
+_RE_CREATION_DATE = re.compile(r'"creationDate"\s*:\s*(\d+)') +_RE_CUSTOM_TITLE = re.compile(r'"customTitle"\s*:\s*"((?:[^"\\]|\\.)*)"') +_RE_SESSION_ID = re.compile(r'"sessionId"\s*:\s*"([^"]+)"') +# Match "message":{ ... "text": "..." } — the ... allows for "parts" or other +# keys that may appear before "text" in old JSON format (version 3). +# Capture is capped at 200 chars so a closing quote beyond the read buffer +# doesn't prevent the match. +_RE_FIRST_MSG = re.compile( + r'"message"\s*:\s*\{.*?"text"\s*:\s*"((?:[^"\\]|\\.){0,200})', re.DOTALL +) + + +def parse_json_metadata(path: Path) -> SessionInfo | None: + """Fast metadata extraction from a .json chat session file. + + Reads the last 1KB to extract creationDate, customTitle, sessionId + (which live at the end of the file), and the first 2KB to get the + first user message. Falls back to full parse if the tail doesn't + end with the expected closing brace. + """ + size = path.stat().st_size + if size == 0: + return None + + # Read last 1KB for metadata fields that live at the end. + with open(path, "rb") as fh: + fh.seek(max(0, size - 1024)) + tail = fh.read().decode("utf-8", errors="replace") + + # Sanity check: file should end with "}" + if not tail.rstrip().endswith("}"): + return parse_json(path) # fall back to full parse + + m = _RE_CREATION_DATE.search(tail) + creation_date = int(m.group(1)) if m else None + + m = _RE_CUSTOM_TITLE.search(tail) + title = m.group(1) if m else None + + m = _RE_SESSION_ID.search(tail) + session_id = m.group(1) if m else path.stem + + # Read first 4KB for the first user message. + with open(path, "rb") as fh: + head = fh.read(4096).decode("utf-8", errors="replace") + + m = _RE_FIRST_MSG.search(head) + first_user = m.group(1) if m else "" + # Unescape basic JSON escapes in the extracted string. 
+ if first_user: + first_user = first_user.replace("\\n", "\n").replace('\\"', '"') + + # Check whether the file has any requests ("requests": [...]) + has_requests = '"requests"' in head and '"requests": []' not in head + + info: SessionInfo = { + "path": str(path), + "session_id": session_id, + "title": title, + "creation_date": creation_date, + "model": "", + "size": size, + "requests": [{"user": first_user}] if has_requests else [], + } + return info + + +def parse_json(path: Path) -> SessionInfo | None: + """Parse a .json chat session file (full parse).""" + try: + data = json.loads(path.read_text(errors="replace")) + except json.JSONDecodeError: + return None + + info: SessionInfo = { + "path": str(path), + "session_id": data.get("sessionId", path.stem), + "title": data.get("customTitle"), + "creation_date": data.get("creationDate"), + "size": path.stat().st_size, + "model": ( + data.get("inputState", {}) + .get("selectedModel", {}) + .get("metadata", {}) + .get("name", "") + ), + "requests": [], + } + + for req in data.get("requests", []): + parsed = _parse_request(req) + if parsed: + info["requests"].append(parsed) + + return info + + +def _parse_request(req: dict[str, Any]) -> dict[str, Any] | None: + """Extract user message and assistant response from a request object.""" + if not isinstance(req, dict): + return None + + user_text = req.get("message", {}).get("text", "") + timestamp = req.get("timestamp") + model_id = req.get("modelId", "") + + # modelState.value: 1 = completed, 4 = cancelled + model_state_raw = req.get("modelState", {}) + model_state = ( + model_state_raw.get("value") if isinstance(model_state_raw, dict) else None + ) + + # Collect assistant response text + response_parts: list[str] = [] + thinking_parts: list[str] = [] + for part in req.get("response", []): + if isinstance(part, dict): + if part.get("kind") == "thinking" and part.get("value"): + thinking_parts.append(part["value"]) + elif "value" in part and isinstance(part["value"], 
str) and part["value"]: + if part.get("kind") not in ("thinking", "toolInvocationSerialized"): + response_parts.append(part["value"]) + + # Collect tool calls + tool_calls: list[str] = [] + for part in req.get("response", []): + if isinstance(part, dict) and part.get("kind") == "toolInvocationSerialized": + tool_id = part.get("toolId", "") + tool_data = part.get("toolSpecificData", {}) + if isinstance(tool_data, dict): + cmd = tool_data.get("commandLine", {}) + if isinstance(cmd, dict): + display = cmd.get("forDisplay", cmd.get("original", "")) + if display: + tool_calls.append(display.strip()) + continue + # Non-terminal tools: show a short label + if tool_id: + tool_calls.append(f"[{tool_id}]") + + return { + "user": user_text, + "assistant": "\n".join(response_parts), + "thinking": "\n".join(thinking_parts), + "tools": tool_calls, + "timestamp": timestamp, + "model": model_id, + "model_state": model_state, + } + + +def load_all_sessions( + metadata_only: bool = False, limit: int | None = None +) -> list[SessionInfo]: + """Load all sessions from disk. + + Handles both old format (JSON/JSONL files) and new format (GitHub.copilot-chat). + + When metadata_only is True, use fast head+tail extraction instead of + full parsing. Use load_session_by_path() for full parsing. + + When limit is set and metadata_only is True, use file mtime to + pre-sort candidates and only parse the most recent ones. This + avoids the I/O cost of reading all files over slow filesystems. + """ + sessions: list[SessionInfo] = [] + + # Collect candidate files: (path, workspace, suffix). + # For new-format directories, create SessionInfo immediately (no parsing). 
+ candidates: list[tuple[Path, str]] = [] + + for session_dir in find_session_dirs(): + workspace = get_workspace_name(session_dir) + + # Check if this is a GitHub.copilot-chat directory (new format) + if "GitHub.copilot-chat" in str(session_dir): + # New format: sessions are directories in chat-session-resources/ + chat_resources = session_dir / "chat-session-resources" + if chat_resources.is_dir(): + for session_uuid_dir in chat_resources.iterdir(): + if not session_uuid_dir.is_dir(): + continue + session_id = session_uuid_dir.name + info: SessionInfo = { + "path": str(session_uuid_dir), + "session_id": session_id, + "title": None, + "creation_date": None, + "size": 0, + "model": "", + "requests": [], + "workspace": workspace, + } + sessions.append(info) + else: + # Old format: JSON/JSONL files + for f in session_dir.iterdir(): + if f.suffix in (".jsonl", ".json"): + candidates.append((f, workspace)) + + # When we have a limit and only need metadata, use mtime to pre-sort + # so we only parse the most recent files. + if metadata_only and limit and len(candidates) > limit: + # stat each file for mtime (cheap compared to open+read+parse) + mtime_candidates: list[tuple[float, Path, str]] = [] + for f, workspace in candidates: + try: + mtime_candidates.append((f.stat().st_mtime, f, workspace)) + except OSError: + continue + mtime_candidates.sort(reverse=True) + # Parse 2x the limit to allow for empty sessions being filtered out. + candidates = [(f, ws) for _, f, ws in mtime_candidates[: limit * 2]] + + # Parse the candidate files. 
+ for f, workspace in candidates: + if metadata_only: + parsed_info = ( + parse_jsonl_metadata(f) + if f.suffix == ".jsonl" + else parse_json_metadata(f) + ) + else: + parsed_info = parse_jsonl(f) if f.suffix == ".jsonl" else parse_json(f) + if parsed_info is not None: + parsed_info["workspace"] = workspace + sessions.append(parsed_info) + + # Sort by creation date (newest first) + sessions.sort( + key=lambda s: s.get("creation_date") or 0, + reverse=True, + ) + return sessions + + +def load_session_by_path(path_str: str) -> SessionInfo | None: + """Fully parse a single session file by its path.""" + path = Path(path_str) + if path.is_dir(): + # New format directory — no full parse available yet + return None + if path.suffix == ".jsonl": + return parse_jsonl(path) + elif path.suffix == ".json": + return parse_json(path) + return None + + +def format_timestamp(ts: int | None) -> str: + if not ts: + return "?" + # VS Code stores timestamps in milliseconds + dt = datetime.datetime.fromtimestamp(ts / 1000) + return dt.strftime("%Y-%m-%d %H:%M") + + +def get_terminal_width() -> int: + """Get terminal character width.""" + return shutil.get_terminal_size(fallback=(80, 24)).columns + + +def list_sessions( + sessions: list[SessionInfo], + limit: int | None = None, + show_all: bool = False, + term_width: int | None = None, +) -> None: + """Print a summary table of sessions.""" + to_show = sessions[:limit] if limit else sessions + width = term_width if term_width is not None else 999999 + for i, s in enumerate(to_show): + reqs = s.get("requests", []) + if not reqs and not show_all: + continue + title = s.get("title") + first_msg = "" + if reqs: + first_msg = reqs[0].get("user", "") + label = title or first_msg or "(empty)" + # Remove newlines to prevent formatting issues + label = label.replace("\n", " ").replace("\r", "") + date_str = format_timestamp(s.get("creation_date")) + workspace = s.get("workspace", "?") + size_kb = s.get("size", 0) / 1024 + if size_kb >= 1024: + 
size_str = f"{size_kb / 1024:.1f}M" + else: + size_str = f"{size_kb:.0f}K" + + if use_color: + # Colorize the session listing + line = ( + f" {Fore.CYAN}{i + 1:3d}{Style.RESET_ALL}. " + f"[{Fore.YELLOW}{date_str}{Style.RESET_ALL}] " + f"({Fore.MAGENTA}{workspace}{Style.RESET_ALL}) " + f"{Fore.GREEN}{size_str:>5}{Style.RESET_ALL} {label}" + ) + else: + line = f" {i + 1:3d}. [{date_str}] ({workspace}) {size_str:>5} {label}" + # Clip to terminal width (use visible length to account for ANSI codes) + if visible_len(line) > width: + line = clip_to_visible_length(line, width - 1) + print(line) + + +def show_session(session: SessionInfo) -> None: + """Print a full conversation.""" + title = session.get("title") or "(untitled)" + date_str = format_timestamp(session.get("creation_date")) + workspace = session.get("workspace", "?") + model = session.get("model", "?") + session_id = session.get("session_id", "?") + + if use_color: + print(f"Session: {Fore.CYAN}{title}{Style.RESET_ALL}") + print(f" ID: {Fore.YELLOW}{session_id}{Style.RESET_ALL}") + print(f" Date: {Fore.YELLOW}{date_str}{Style.RESET_ALL}") + print(f" Workspace: {Fore.MAGENTA}{workspace}{Style.RESET_ALL}") + print(f" Model: {Fore.GREEN}{model}{Style.RESET_ALL}") + print( + f" Messages: {Fore.CYAN}{len(session.get('requests', []))}{Style.RESET_ALL}" + ) + else: + print(f"Session: {title}") + print(f" ID: {session_id}") + print(f" Date: {date_str}") + print(f" Workspace: {workspace}") + print(f" Model: {model}") + print(f" Messages: {len(session.get('requests', []))}") + print("=" * 72) + + for req in session.get("requests", []): + ts = format_timestamp(req.get("timestamp")) + model_id = req.get("model", "") + model_short = model_id.split("/")[-1] if "/" in model_id else model_id + model_state = req.get("model_state") + + user_text = req.get("user", "") + assistant_text = req.get("assistant", "") + thinking = req.get("thinking", "") + tools = req.get("tools", []) + + cancelled = model_state == 4 + status = " 
(cancelled)" if cancelled else "" + + if use_color: + print( + f"\n--- [{Fore.YELLOW}{ts}{Style.RESET_ALL}]{Fore.YELLOW}{status}{Style.RESET_ALL} ---" + ) + print(f"\n{Fore.CYAN}YOU{Style.RESET_ALL}: {user_text}") + + if thinking: + # Preserve paragraph structure while indenting + lines = thinking.split("\n") + indented_lines = [" " + line for line in lines] + print( + f"\n{Fore.MAGENTA}{Style.RESET_ALL}\n" + + "\n".join(indented_lines) + + f"\n{Fore.MAGENTA}{Style.RESET_ALL}" + ) + + if tools: + for tool_cmd in tools: + if tool_cmd.startswith("["): + print(f"\n {Fore.GREEN}{tool_cmd}{Style.RESET_ALL}") + else: + print(f"\n {Fore.GREEN}${Style.RESET_ALL} {tool_cmd}") + + if assistant_text: + print( + f"\n{Fore.CYAN}COPILOT{Style.RESET_ALL} ({Fore.GREEN}{model_short}{Style.RESET_ALL}):\n{assistant_text}" + ) + elif tools and not cancelled: + print( + f"\n{Fore.CYAN}COPILOT{Style.RESET_ALL} ({Fore.GREEN}{model_short}{Style.RESET_ALL}): ({len(tools)} tool call(s), no text response)" + ) + else: + print(f"\n--- [{ts}]{status} ---") + print(f"\nYOU: {user_text}") + + if thinking: + # Preserve paragraph structure while indenting + lines = thinking.split("\n") + indented_lines = [" " + line for line in lines] + print(f"\n\n" + "\n".join(indented_lines) + "\n") + + if tools: + for tool_cmd in tools: + if tool_cmd.startswith("["): + print(f"\n {tool_cmd}") + else: + print(f"\n $ {tool_cmd}") + + if assistant_text: + print(f"\nCOPILOT ({model_short}):\n{assistant_text}") + elif tools and not cancelled: + print( + f"\nCOPILOT ({model_short}): ({len(tools)} tool call(s), no text response)" + ) + + print() + + +def search_sessions( + sessions: list[SessionInfo], query: str, term_width: int | None = None +) -> None: + """Search all sessions for messages containing query text. + + Note: Search includes only user and assistant messages, not thinking or tool calls. 
+ """ + query_lower = query.lower() + hits = 0 + width = term_width if term_width is not None else 999999 + for i, s in enumerate(sessions): + for req in s.get("requests", []): + user = req.get("user", "") + assistant = req.get("assistant", "") + if query_lower in user.lower() or query_lower in assistant.lower(): + title = s.get("title") or "(untitled)" + title = title.replace("\n", " ").replace("\r", "") + date_str = format_timestamp(s.get("creation_date")) + workspace = s.get("workspace", "?") + if use_color: + line1 = ( + f"\n{Fore.CYAN}{i + 1:3d}{Style.RESET_ALL}. " + f"[{Fore.YELLOW}{date_str}{Style.RESET_ALL}] " + f"({Fore.MAGENTA}{workspace}{Style.RESET_ALL}) " + f"{Fore.GREEN}{title}{Style.RESET_ALL}" + ) + else: + line1 = f"\n{i + 1}. [{date_str}] ({workspace}) {title}" + if visible_len(line1) > width: + line1 = clip_to_visible_length(line1, width - 1) + print(line1) + # Show the matching message snippet + for text, label in [(user, "YOU"), (assistant, "COPILOT")]: + idx = text.lower().find(query_lower) + if idx >= 0: + # Extract enough to fill the line width around the match + # Account for prefix length to leave room for: " YOU/COPILOT: " + prefix_len = len(" YOU: ") # rough estimate + available = max( + 40, width - prefix_len - 10 + ) # -10 for ANSI codes + half_avail = available // 2 + # Compute initial start/end with match centered + start = max(0, idx - half_avail) + end = min(len(text), idx + len(query) + half_avail) + # If we hit a boundary, use the extra space on the other side + left_unused = idx - start + right_unused = end - (idx + len(query)) + if start == 0: + end = min(len(text), end + left_unused) + elif end == len(text): + start = max(0, start - right_unused) + snippet = text[start:end].replace("\n", " ") + has_start_ellipsis = start > 0 + has_end_ellipsis = end < len(text) + + # Highlight the query in the snippet + snippet = highlight_query(snippet, query) + + if has_start_ellipsis: + snippet = "..." 
+ snippet + if has_end_ellipsis: + snippet = snippet + "..." + + if use_color: + prefix = f" {Fore.CYAN}{label}{Style.RESET_ALL}: " + else: + prefix = f" {label}: " + + line2 = prefix + snippet + # Clip to terminal width using visible length, preserving trailing "..." + if visible_len(line2) > width: + line2 = clip_to_visible_length(line2, width - 4) + "..." + print(line2) + hits += 1 + if hits == 0: + print(f"No messages found matching '{query}'.") + else: + print(f"\n{hits} match(es) found.") + + +def get_default_pager() -> str | None: + """Determine the pager, using the same fallback chain as git.""" + # 1. git config core.pager + try: + result = subprocess.run( + ["git", "config", "--get", "core.pager"], + capture_output=True, + text=True, + ) + if result.returncode == 0 and result.stdout.strip(): + return result.stdout.strip() + except FileNotFoundError: + pass + # 2. GIT_PAGER env + if pager := os.environ.get("GIT_PAGER"): + return pager + # 3. PAGER env + if pager := os.environ.get("PAGER"): + return pager + # 4. Platform default: less on Unix, built-in on Windows. + if sys.platform != "win32": + return "less" + return None + + +def _read_one_key() -> str: + """Read a single keypress without echo. Returns the character, or '' for + unrecognised special keys (e.g. arrow keys on Windows).""" + # Platform-specific imports are inside the function because msvcrt is + # Windows-only and termios/tty/select are Unix-only. + if sys.platform == "win32": + import msvcrt + + ch = msvcrt.getwch() + if ch in ("\x00", "\xe0"): # start of a two-byte special key + msvcrt.getwch() # discard second byte + return "" + return ch + else: + import select + import termios + import tty + + fd = sys.stdin.fileno() + old = termios.tcgetattr(fd) + try: + tty.setraw(fd) + ch = sys.stdin.read(1) + # Drain the rest of any escape sequence (e.g. arrow keys). 
+ if ch == "\x1b": + while select.select([sys.stdin], [], [], 0.05)[0]: + sys.stdin.read(1) + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old) + return ch + + +@contextlib.contextmanager +def builtin_pager() -> Iterator[None]: + """Built-in forward-only pager: Space=next page, Enter=next line, q=quit.""" + if not sys.stdout.isatty(): + yield + return + + buf = io.StringIO() + old_stdout = sys.stdout + sys.stdout = buf # type: ignore[assignment] + try: + yield + finally: + sys.stdout = old_stdout + + output = buf.getvalue() + lines = output.splitlines(keepends=True) + page_size = max(1, shutil.get_terminal_size().lines - 1) + + if len(lines) <= page_size: + old_stdout.write(output) + old_stdout.flush() + return + + # Show first page. + pos = min(page_size, len(lines)) + old_stdout.write("".join(lines[:pos])) + old_stdout.flush() + + prompt = "--More-- (Space=page, Enter=line, q=quit) " + while pos < len(lines): + old_stdout.write(prompt) + old_stdout.flush() + key = _read_one_key() + # Erase the prompt line. + old_stdout.write("\r" + " " * len(prompt) + "\r") + old_stdout.flush() + if key in ("q", "Q", "\x1b", "\x03"): # q, Q, ESC, Ctrl-C + break + elif key in ("\r", "\n"): # Enter — one more line + old_stdout.write(lines[pos]) + old_stdout.flush() + pos += 1 + else: # Space or anything else — next full page + end = min(pos + page_size, len(lines)) + old_stdout.write("".join(lines[pos:end])) + old_stdout.flush() + pos = end + + +@contextlib.contextmanager +def smart_pager(pager_cmd: str) -> Iterator[None]: + """Pipe stdout directly through an external pager process. + + For ``less``, LESS=FRX causes it to exit automatically when all output + fits on one screen. 
+ """ + if not sys.stdout.isatty(): + yield + return + + env = os.environ.copy() + # less: quit-if-one-screen, raw-control-chars, no-init + env.setdefault("LESS", "FRX") + try: + proc = subprocess.Popen( + shlex.split(pager_cmd), + shell=False, + stdin=subprocess.PIPE, + encoding="utf-8", + errors="replace", + env=env, + ) + except OSError: + yield + return + + old_stdout = sys.stdout + sys.stdout = proc.stdin # type: ignore[assignment] + try: + yield + except BrokenPipeError: + pass + finally: + sys.stdout = old_stdout + try: + proc.stdin.close() # type: ignore[union-attr] + except OSError: + pass + proc.wait() + + +def main() -> None: + parser = argparse.ArgumentParser(description="Browse VS Code Copilot chat sessions") + parser.add_argument( + "session", + nargs="?", + help="Session ID or list index to view in full", + ) + parser.add_argument( + "-n", + type=int, + default=None, + help="Number of recent sessions to list", + ) + parser.add_argument( + "-s", + "--search", + type=str, + default=None, + help="Search messages for text", + ) + parser.add_argument( + "-a", + "--all", + action="store_true", + default=False, + help="Include empty sessions in the listing", + ) + parser.add_argument( + "--pager", + type=str, + default=None, + help="Pager command (default: from git config, then $GIT_PAGER, $PAGER, built-in)", + ) + parser.add_argument( + "--no-pager", + action="store_true", + default=False, + help="Disable pager", + ) + parser.add_argument( + "--color", + type=str, + choices=["always", "never", "auto"], + default="auto", + help="When to use color (always, never, auto)", + ) + args = parser.parse_args() + + # Initialize colorama with autoreset disabled so we can use explicit Style.RESET_ALL + # Use strip=False to preserve ANSI codes even when piped (caller can strip if needed) + init(autoreset=False, strip=False) + global use_color + use_color = should_use_color(args) + + explicit_pager = args.pager + configured_pager = ( + explicit_pager if explicit_pager 
is not None else get_default_pager() + ) + + # For search, we need full parsing; for everything else, metadata suffices. + need_full = args.search is not None + # Pass limit so load_all_sessions can skip parsing old files when -n is set. + listing_limit = args.n if not need_full and not args.session else None + sessions = load_all_sessions(metadata_only=not need_full, limit=listing_limit) + if not sessions: + if use_color: + print(f"{Fore.RED}No chat sessions found.{Style.RESET_ALL}") + else: + print("No chat sessions found.") + return + + use_pager = not args.no_pager + if not use_pager: + ctx: contextlib.AbstractContextManager[None] = contextlib.nullcontext() + elif configured_pager is not None: + ctx = smart_pager(configured_pager) + else: + ctx = builtin_pager() + + # Always get terminal width for reasonable snippet extraction and display + # Only used for clipping if stdout is a TTY or using pager + term_width = get_terminal_width() + + with ctx: + if args.search: + search_sessions(sessions, args.search, term_width=term_width) + return + + if args.session: + # Try as a list index first + try: + idx = int(args.session) - 1 + if 0 <= idx < len(sessions): + full = load_session_by_path(sessions[idx]["path"]) + show_session(full or sessions[idx]) + return + except ValueError: + pass + # Try as a session ID + for s in sessions: + if s.get("session_id") == args.session: + full = load_session_by_path(s["path"]) + show_session(full or s) + return + print(f"Session not found: {args.session}") + return + + n_empty = sum(1 for s in sessions if not s.get("requests")) + if listing_limit: + # With -n, we only parsed a subset, so don't report total counts. 
+ print() + elif use_color: + if n_empty: + print( + f"Found {Fore.CYAN}{len(sessions)}{Style.RESET_ALL} chat session(s), " + f"{Fore.YELLOW}{n_empty}{Style.RESET_ALL} empty:\n" + ) + else: + print( + f"Found {Fore.CYAN}{len(sessions)}{Style.RESET_ALL} chat session(s):\n" + ) + else: + if n_empty: + print(f"Found {len(sessions)} chat session(s), {n_empty} empty:\n") + else: + print(f"Found {len(sessions)} chat session(s):\n") + list_sessions(sessions, args.n, show_all=args.all, term_width=term_width) + if use_color: + print( + f"\nUse: {Fore.CYAN}python {sys.argv[0]} {Style.RESET_ALL} " + f"to view a session" + ) + else: + print(f"\nUse: python {sys.argv[0]} to view a session") + + +if __name__ == "__main__": + main() diff --git a/tools/get_keys.py b/tools/get_keys.py index c981024a..cf69336b 100644 --- a/tools/get_keys.py +++ b/tools/get_keys.py @@ -649,8 +649,7 @@ async def pull_secrets(): def print_help(): """Print help message.""" - print( - """ + print(""" Usage: get_keys.py [command] [options] Commands: @@ -667,8 +666,7 @@ def print_help(): python get_keys.py pull # Pull secrets python get_keys.py push # Push secrets python get_keys.py pull --vault my-vault # Pull from specific vault -""" - ) +""") async def main(): diff --git a/tools/ingest_email.py b/tools/ingest_email.py index 59df9664..eccac4cb 100644 --- a/tools/ingest_email.py +++ b/tools/ingest_email.py @@ -11,27 +11,37 @@ Usage: python tools/ingest_email.py -d email.db inbox_dump/ python tools/ingest_email.py -d email.db message1.eml message2.eml - python query.py --database email.db --query "What was discussed?" + python tools/ingest_email.py -d email.db inbox_dump/ --start-date 2023-01-01 --stop-date 2023-02-01 + python tools/ingest_email.py -d email.db inbox_dump/ --offset 10 --limit 5 + + python tools/query.py --database email.db --query "What was discussed?" 
""" """ TODO -- Catch auth errors and stop rather than marking as failed - Collect knowledge outside db transaction to reduce lock time """ import argparse import asyncio +from datetime import datetime from pathlib import Path import sys import time import traceback +from typing import Iterable + +from dotenv import load_dotenv import openai from typeagent.aitools import utils -from typeagent.emails.email_import import decode_encoded_words, import_email_from_file +from typeagent.emails.email_import import ( + decode_encoded_words, + email_matches_date_filter, + import_email_from_file, +) from typeagent.emails.email_memory import EmailMemory from typeagent.emails.email_message import EmailMessage from typeagent.knowpro.convsettings import ConversationSettings @@ -41,14 +51,36 @@ def create_arg_parser() -> argparse.ArgumentParser: """Create argument parser for the email ingestion tool.""" parser = argparse.ArgumentParser( - description="Ingest email (.eml) files into a database for querying", + description="Ingest email (.eml) files into a database for querying.", formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=( + "filter pipeline:\n" + " 1. --offset/--limit slice the input file list.\n" + " 2. Already-ingested emails are always skipped.\n" + " 3. 
--start-date/--stop-date narrow the date range (combinable).\n" + "\n" + "examples:\n" + " # Ingest all .eml files in a directory\n" + " python tools/ingest_email.py -d mail.db inbox/\n" + "\n" + " # Ingest only January 2024 emails\n" + " python tools/ingest_email.py -d mail.db inbox/ " + "--start-date 2024-01-01 --stop-date 2024-02-01\n" + "\n" + " # Ingest the first 20 matching emails\n" + " python tools/ingest_email.py -d mail.db inbox/ --limit 20\n" + "\n" + " # Skip the first 100, then ingest the next 50\n" + " python tools/ingest_email.py -d mail.db inbox/ " + "--offset 100 --limit 50\n" + ), ) parser.add_argument( "paths", nargs="+", - help="Path to one or more .eml files or directories containing .eml files", + metavar="PATH", + help="One or more .eml files or directories containing .eml files", ) parser.add_argument( @@ -62,10 +94,80 @@ def create_arg_parser() -> argparse.ArgumentParser: "-v", "--verbose", action="store_true", help="Show verbose/debug output" ) + # Date filters + parser.add_argument( + "--start-date", + metavar="DATE", + help=( + "Only include emails dated on or after DATE (YYYY-MM-DD, " + "interpreted as local midnight). Combinable with --stop-date." + ), + ) + parser.add_argument( + "--stop-date", + metavar="DATE", + help=( + "Only include emails dated before DATE (YYYY-MM-DD, exclusive " + "upper bound, local midnight). Combinable with --start-date." + ), + ) + + # Pagination + parser.add_argument( + "--offset", + type=int, + default=0, + metavar="N", + help=( + "Skip the first N files in the input list " + "(applied before any other filtering). Default: 0." + ), + ) + parser.add_argument( + "--limit", + type=int, + default=None, + metavar="N", + help=( + "Process at most N files from the input list " + "(applied before any other filtering). Default: no limit." 
+ ), + ) + return parser -def collect_email_files(paths: list[str], verbose: bool) -> list[Path]: +def _validate_args(args: argparse.Namespace) -> None: + """Validate argument combinations and exit on error.""" + errors: list[str] = [] + + # --offset must be non-negative + if args.offset < 0: + errors.append("--offset must be a non-negative integer.") + + # --limit must be positive when given + if args.limit is not None and args.limit <= 0: + errors.append("--limit must be a positive integer.") + + # --offset without --limit is allowed (skip first N, ingest the rest) + # --limit without --offset is allowed (ingest at most N) + + # --start-date must be before --stop-date when both are given + if args.start_date and args.stop_date: + start = _parse_date(args.start_date) + stop = _parse_date(args.stop_date) + if start >= stop: + errors.append( + f"--start-date ({args.start_date}) must be earlier than --stop-date ({args.stop_date})." + ) + + if errors: + for err in errors: + print(f"Error: {err}", file=sys.stderr) + sys.exit(2) + + +def collect_eml_files(paths: list[str], verbose: bool) -> list[Path]: """Collect all .eml files from the given paths (files or directories).""" email_files: list[Path] = [] @@ -79,7 +181,7 @@ def collect_email_files(paths: list[str], verbose: bool) -> list[Path]: if path.suffix.lower() == ".eml": email_files.append(path) else: - print(f"Error: Skipping non-.eml file: {path}", file=sys.stderr) + print(f"Error: Not an .eml file: {path}", file=sys.stderr) sys.exit(1) elif path.is_dir(): eml_files = sorted(path.glob("*.eml")) @@ -93,28 +195,93 @@ def collect_email_files(paths: list[str], verbose: bool) -> list[Path]: return email_files -async def ingest_emails( - paths: list[str], - database: str, - verbose: bool = False, -) -> None: - """Ingest email files into a database.""" +def _parse_date(date_str: str) -> datetime: + """Parse a YYYY-MM-DD string into a timezone-aware datetime. 
- # Collect all .eml files - with utils.timelog("Collecting email files"): - email_files = collect_email_files(paths, verbose) + The date is interpreted as 00:00:00 in the local timezone, so that + ``--start-date 2024-01-15`` means the start of that day locally. + """ + try: + # astimezone() on a naive datetime assumes local time (Python 3.6+) + return datetime.strptime(date_str, "%Y-%m-%d").astimezone() + except ValueError: + print( + f"Error: Invalid date format '{date_str}'. Use YYYY-MM-DD.", + file=sys.stderr, + ) + sys.exit(1) + +def _iter_emails( + eml_paths: list[str], + verbose: bool, + offset: int = 0, + limit: int | None = None, +) -> Iterable[tuple[str, Path, str]]: + """Yield (source_id, file_path, label) from the given .eml paths. + + *offset* and *limit* slice the collected file list (like + ``files[offset:offset+limit]``) before anything else happens. + Does NOT parse the files; the caller imports only the emails it needs. + """ + with utils.timelog("Collecting .eml files"): + email_files = collect_eml_files(eml_paths, verbose) if not email_files: print("Error: No .eml files found", file=sys.stderr) sys.exit(1) - + total = len(email_files) if verbose: - print(f"Found {len(email_files)} email files in total to ingest") + print(f"Found {total} .eml files") + end = offset + limit if limit is not None else None + email_files = email_files[offset:end] + if verbose and (offset or limit is not None): + print(f"After --offset={offset} --limit={limit}: {len(email_files)} files") + sliced_total = len(email_files) + for i, email_file in enumerate(email_files): + label = f"[{i + 1}/{sliced_total}] {email_file}" + yield str(email_file), email_file, label + + +def _print_email_verbose(email: EmailMessage) -> None: + """Print verbose details for an email.""" + print(f" From: {decode_encoded_words(email.metadata.sender)}") + if email.metadata.recipients: + print( + f" To: {', '.join(decode_encoded_words(r) for r in email.metadata.recipients)}" + ) + if 
email.metadata.cc: + print( + f" Cc: {', '.join(decode_encoded_words(r) for r in email.metadata.cc)}" + ) + if email.metadata.subject: + print( + f" Subject: {decode_encoded_words(email.metadata.subject).replace('\n', '\\n')}" + ) + print(f" Date: {email.timestamp}") + print(f" Body chunks: {len(email.text_chunks)}") + MAIL_PREVIEW_LEN = 80 + for chunk in email.text_chunks: + preview = repr(chunk[: MAIL_PREVIEW_LEN + 1])[1:-1] + if len(preview) > MAIL_PREVIEW_LEN: + preview = preview[: MAIL_PREVIEW_LEN - 3] + "..." + print(f" {preview}") + + +async def ingest_emails( + eml_paths: list[str], + database: str, + verbose: bool = False, + start_date: datetime | None = None, + stop_date: datetime | None = None, + offset: int = 0, + limit: int | None = None, +) -> None: + """Ingest email files into a database.""" # Load environment for model API access if verbose: print("Loading environment...") - utils.load_dotenv() + load_dotenv() # Create conversation settings and storage provider if verbose: @@ -134,12 +301,9 @@ async def ingest_emails( if verbose: print(f"Target database: {database}") - batch_size = settings.semantic_ref_index_settings.batch_size - if verbose: - print(f"Batch size: {batch_size}") - - # Parse and import emails + concurrency = settings.semantic_ref_index_settings.concurrency if verbose: + print(f"Concurrency: {concurrency}") print("\nParsing and importing emails...") success_count = 0 @@ -150,65 +314,29 @@ async def ingest_emails( semref_coll = await settings.storage_provider.get_semantic_ref_collection() storage_provider = settings.storage_provider - for i, email_file in enumerate(email_files): + for source_id, email_file, label in _iter_emails(eml_paths, verbose, offset, limit): try: if verbose: - print(f"[{i + 1}/{len(email_files)}] {email_file}", end="", flush=True) - if status := storage_provider.get_source_status(str(email_file)): - skipped_count += 1 - if verbose: - print(f" [Previously {status}, skipping]") - continue - else: - if verbose: - 
print() + print(label, end="", flush=True) + # Parse the email only after confirming it hasn't been ingested email = import_email_from_file(str(email_file)) - source_id = email.metadata.id - if verbose: - print(f" Email ID: {source_id}", end="") - # Check if this email was already ingested - if source_id and (status := storage_provider.get_source_status(source_id)): + # Apply date filter + if not email_matches_date_filter(email.timestamp, start_date, stop_date): skipped_count += 1 if verbose: - print(f" [Previously {status}, skipping]") - async with storage_provider: - storage_provider.mark_source_ingested(str(email_file), status) + print(" [Outside date range, skipping]") continue - else: - if verbose: - print() if verbose: - print(f" From: {decode_encoded_words(email.metadata.sender)}") - if email.metadata.recipients: - print( - f" To: {', '.join(decode_encoded_words(r) for r in email.metadata.recipients)}" - ) - if email.metadata.cc: - print( - f" Cc: {', '.join(decode_encoded_words(r) for r in email.metadata.cc)}" - ) - if email.metadata.subject: - print( - f" Subject: {decode_encoded_words(email.metadata.subject).replace('\n', '\\n')}" - ) - print(f" Date: {email.timestamp}") - print(f" Body chunks: {len(email.text_chunks)}") - for chunk in email.text_chunks: - # Show first N chars of each decoded chunk - N = 150 - preview = repr(chunk[: N + 1])[1:-1] - if len(preview) > N: - preview = preview[: N - 3] + "..." - print(f" {preview}") - - # Pass source_id to mark as ingested atomically with the message + _print_email_verbose(email) + + # Ingest the email try: await email_memory.add_messages_with_indexing( - [email], source_ids=[str(email_file)] - ) # This may raise, esp. 
if the knowledge extraction fails (see except below) + [email], source_ids=[source_id] + ) success_count += 1 except openai.AuthenticationError as e: if verbose: @@ -216,11 +344,11 @@ async def ingest_emails( sys.exit(f"Authentication error: {e!r}") # Print progress periodically - if (success_count + failed_count) % batch_size == 0: + if concurrency and (success_count + failed_count) % concurrency == 0: elapsed = time.time() - start_time semref_count = await semref_coll.size() print( - f"\n[{i + 1}/{len(email_files)}] " + f"\n{label} " f"{success_count} imported | " f"{failed_count} failed | " f"{skipped_count} skipped | " @@ -230,12 +358,15 @@ async def ingest_emails( except Exception as e: failed_count += 1 - print(f"Error processing {email_file}: {e!r:.150s}", file=sys.stderr) + print( + f"Error processing {source_id}: {e!r:.150s}", + file=sys.stderr, + ) mod = e.__class__.__module__ qual = e.__class__.__qualname__ exc_name = qual if mod == "builtins" else f"{mod}.{qual}" async with storage_provider: - storage_provider.mark_source_ingested(str(email_file), exc_name) + await storage_provider.mark_source_ingested(source_id, exc_name) if verbose: traceback.print_exc(limit=10) @@ -274,12 +405,20 @@ def main() -> None: """Main entry point.""" parser = create_arg_parser() args = parser.parse_args() + _validate_args(args) + + start_date = _parse_date(args.start_date) if args.start_date else None + stop_date = _parse_date(args.stop_date) if args.stop_date else None asyncio.run( ingest_emails( - paths=args.paths, + eml_paths=args.paths, database=args.database, verbose=args.verbose, + start_date=start_date, + stop_date=stop_date, + offset=args.offset, + limit=args.limit, ) ) diff --git a/tools/ingest_podcast.py b/tools/ingest_podcast.py index 6ff9cdce..c0f7303d 100644 --- a/tools/ingest_podcast.py +++ b/tools/ingest_podcast.py @@ -2,7 +2,8 @@ import asyncio import os -from typeagent.aitools.utils import load_dotenv +from dotenv import load_dotenv + from 
typeagent.knowpro.convsettings import ConversationSettings from typeagent.podcasts.podcast_ingest import ingest_podcast @@ -30,7 +31,13 @@ async def main(): "--batch-size", type=int, default=10, - help="Batch size for message indexing (default 10)", + help="Number of messages per indexing call (default 10)", + ) + parser.add_argument( + "--concurrency", + type=int, + default=0, + help="Max concurrent knowledge extractions (0 = use settings default)", ) parser.add_argument( "--start-message", @@ -74,6 +81,7 @@ async def main(): dbname=args.database, batch_size=args.batch_size, start_message=args.start_message, + concurrency=args.concurrency, verbose=not args.quiet, ) except (RuntimeError, ValueError) as err: diff --git a/tools/ingest_vtt.py b/tools/ingest_vtt.py index cdad39fc..ffaccfc1 100644 --- a/tools/ingest_vtt.py +++ b/tools/ingest_vtt.py @@ -21,10 +21,10 @@ import sys import time +from dotenv import load_dotenv import webvtt -from typeagent.aitools import utils -from typeagent.aitools.embeddings import AsyncEmbeddingModel +from typeagent.aitools.model_adapters import create_embedding_model from typeagent.knowpro.convsettings import ConversationSettings from typeagent.knowpro.interfaces import ConversationMetadata from typeagent.knowpro.universal_message import format_timestamp_utc, UNIX_EPOCH @@ -75,10 +75,10 @@ def create_arg_parser() -> argparse.ArgumentParser: ) parser.add_argument( - "--batchsize", + "--concurrency", type=int, default=None, - help="Batch size for knowledge extraction (default: from settings)", + help="Max concurrent knowledge extractions (default: from settings)", ) parser.add_argument( @@ -132,7 +132,7 @@ async def ingest_vtt_files( name: str | None = None, merge_consecutive: bool = False, verbose: bool = False, - batchsize: int | None = None, + concurrency: int | None = None, embedding_name: str | None = None, ) -> None: """Ingest one or more VTT files into a database.""" @@ -190,7 +190,7 @@ async def ingest_vtt_files( # Load 
environment for API access if verbose: print("Loading environment...") - utils.load_dotenv() + load_dotenv() # Determine transcript name before creating storage provider if not name: @@ -203,7 +203,10 @@ async def ingest_vtt_files( if verbose: print("Setting up conversation settings...") try: - embedding_model = AsyncEmbeddingModel(model_name=embedding_name) + spec = embedding_name + if spec and ":" not in spec: + spec = f"openai:{spec}" + embedding_model = create_embedding_model(spec) settings = ConversationSettings(embedding_model) # Create metadata with the conversation name @@ -224,9 +227,9 @@ async def ingest_vtt_files( # Update settings to use our storage provider settings.storage_provider = storage_provider - # Override batch size if specified - if batchsize is not None: - settings.semantic_ref_index_settings.batch_size = batchsize + # Override concurrency if specified + if concurrency is not None: + settings.semantic_ref_index_settings.concurrency = concurrency if verbose: print("Settings and storage provider configured") @@ -365,7 +368,7 @@ def save_current_message(): f" auto_extract_knowledge = {settings.semantic_ref_index_settings.auto_extract_knowledge}" ) print( - f" batch_size = {settings.semantic_ref_index_settings.batch_size}" + f" concurrency = {settings.semantic_ref_index_settings.concurrency}" ) # Create a Transcript object @@ -375,13 +378,14 @@ def save_current_message(): tags=[name, "vtt-transcript"], ) - # Process messages in batches - batch_size = settings.semantic_ref_index_settings.batch_size + # Process messages in batches for recoverability + batch_size = 50 successful_count = 0 start_time = time.time() print( - f" Processing {len(all_messages)} messages in batches of {batch_size}..." + f" Processing {len(all_messages)} messages" + f" (concurrency={settings.semantic_ref_index_settings.concurrency})..." 
) for i in range(0, len(all_messages), batch_size): @@ -446,7 +450,7 @@ def main(): database=args.database, name=args.name, merge_consecutive=args.merge, - batchsize=args.batchsize, + concurrency=args.concurrency, embedding_name=args.embedding_name, verbose=args.verbose, ) diff --git a/tools/load_json.py b/tools/load_json.py index 573b4a79..8a885047 100644 --- a/tools/load_json.py +++ b/tools/load_json.py @@ -19,6 +19,8 @@ import asyncio import os +from dotenv import load_dotenv + from typeagent.aitools import utils from typeagent.knowpro.convsettings import ConversationSettings from typeagent.podcasts import podcast @@ -122,7 +124,7 @@ def main(): ) # Load environment variables for API access - utils.load_dotenv() + load_dotenv() # Run the loading process asyncio.run(load_json_to_database(args.index_path, args.database, args.verbose)) diff --git a/tools/mail/README.md b/tools/mail/README.md new file mode 100644 index 00000000..a9551e79 --- /dev/null +++ b/tools/mail/README.md @@ -0,0 +1,229 @@ +# Mail Dump Tools + +## Gmail (`gmail_dump.py`) + +Downloads Gmail messages as `.eml` files via the Gmail API. + +### Prerequisites + +- Python 3.12+ +- A Google Cloud project with the Gmail API enabled +- OAuth 2.0 client credentials (`client_secret.json`) + +### Google Cloud Project Setup + +1. Go to [console.cloud.google.com](https://console.cloud.google.com/) +2. Create a new project (or select an existing one) +3. Go to **APIs & Services** → **Library** +4. Search for **Gmail API** and click **Enable** + +#### Create OAuth 2.0 credentials + +1. Go to **APIs & Services** → **Credentials** +2. Click **Create Credentials** → **OAuth client ID** +3. If prompted, configure the **OAuth consent screen** first: + - Choose **External** (or **Internal** for organization-only) + - Fill in the required fields (app name, email) + - Add the scope `https://www.googleapis.com/auth/gmail.readonly` + - Add your email as a test user (required for External apps in testing mode) +4. 
Back in **Credentials**, select **Desktop app** as the application type +5. Click **Create** and download the JSON file +6. Save it as `client_secret.json` in your credentials directory + +### Configuration + +Place `client_secret.json` in the directory you pass via `--creds-dir` +(defaults to the current directory). On first run the tool opens a browser +for OAuth consent and saves the resulting token as `token.json` in the same +directory. Subsequent runs reuse the cached token. + +### Usage + +```bash +# Download 50 most recent messages +python tools/mail/gmail_dump.py + +# Download 200 messages +python tools/mail/gmail_dump.py --max-results 200 + +# Filter messages with a Gmail search query +python tools/mail/gmail_dump.py --query "from:alice@example.com" + +# Specify a custom credentials directory +python tools/mail/gmail_dump.py --creds-dir ~/gmail-creds + +# Specify a custom output directory +python tools/mail/gmail_dump.py --output-dir ~/my-emails +``` + +### Command-line flags + +| Flag | Description | Default | +|---|---|---| +| `--max-results` | Max messages to download | `50` | +| `--output-dir` | Output directory for `.eml` files | `mail_dump` | +| `--query` | Gmail search query (same syntax as the Gmail search bar) | _(all messages)_ | +| `--creds-dir` | Directory containing `client_secret.json` and `token.json` | `.` (current dir) | + +> **Note:** Messages are saved as `{message_id}.eml` (using the Gmail message +> ID as filename), unlike the Outlook tool which uses sequential numbering. + +## Outlook (`outlook_dump.py`) + +Downloads Outlook emails as `.eml` files via the Microsoft Graph API. + +### Prerequisites + +- Python 3.12+ +- An Azure AD (Microsoft Entra ID) app registration + +### Azure AD App Registration Setup + +1. Go to [portal.azure.com](https://portal.azure.com) → **Microsoft Entra ID** → **App registrations** +2. Click **New registration** (or open an existing app) +3. Set a name (e.g. `outlook-mail-dump`) and click **Register** +4. 
Note the **Application (client) ID** (a GUID) — this is your `--application-client-id` +5. Note the **Directory (tenant) ID** — this is your `--tenant-id` + +#### Add a redirect URI (required for interactive browser auth) + +1. In your app registration, go to **Authentication** +2. Click **Add a platform** → **Mobile and desktop applications** +3. Check **`http://localhost`** +4. Click **Configure** / **Save** + +> **Tip:** If you cannot add a redirect URI (e.g. on a headless server), use +> `--device-code` instead — it does not require a redirect URI. + +#### Add API permissions + +1. In your app registration, go to **API permissions** +2. Click **Add a permission** → **Microsoft Graph** → **Delegated permissions** +3. Search for **`Mail.Read`** and add it +4. Click **Grant admin consent** if required by your organization + +### Configuration + +The tool reads defaults from a `.env` file in the project root. +Add these variables: + +```env +# User email address for login (pre-fills the sign-in page) +OUTLOOK_CLIENT_ID=user@example.onmicrosoft.com + +# Azure AD app registration client ID (GUID) +OUTLOOK_APPLICATION_CLIENT_ID=... + +# Azure AD tenant ID (GUID) +OUTLOOK_TENANT_ID=... +``` + +All values can also be overridden via command-line flags. + +> **All three identity parameters are optional.** When `--application-client-id` +> (or `OUTLOOK_APPLICATION_CLIENT_ID`) is not provided, the tool falls back to +> `DefaultAzureCredential`, which automatically picks up ambient credentials in +> this order: +> +> 1. **Azure CLI** — run `az login` first +> 2. **VS Code** — sign in via the Azure Account extension +> 3. **Managed Identity** — on Azure VMs / App Service +> 4. **Environment variables** — `AZURE_CLIENT_ID`, `AZURE_TENANT_ID`, `AZURE_CLIENT_SECRET` +> +> When `--client-id` is omitted the browser sign-in page will not be +> pre-filled (login hint). When `--tenant-id` is omitted it defaults to +> `"common"` (multi-tenant). 
+> +> This means you can run `python tools/mail/outlook_dump.py` with **zero +> configuration** as long as you are signed into Azure CLI or VS Code. + +### Usage + +```bash +# Download 50 most recent messages (interactive browser auth) +python tools/mail/outlook_dump.py + +# Use device-code flow (no redirect URI needed) +python tools/mail/outlook_dump.py --device-code + +# Download 200 messages +python tools/mail/outlook_dump.py --max-results 200 + +# Filter by sender +python tools/mail/outlook_dump.py \ + --filter "from/emailAddress/address eq 'alice@example.com'" + +# Full-text search (KQL) +python tools/mail/outlook_dump.py --search "subject:quarterly report" + +# Check permissions only +python tools/mail/outlook_dump.py --check-permissions + +# Add Mail.Read to the app registration (requires admin privileges) +python tools/mail/outlook_dump.py --setup-permissions +``` + +### Command-line flags + +| Flag | Description | Env variable | +|---|---|---| +| `--client-id` | User email for login hint | `OUTLOOK_CLIENT_ID` | +| `--application-client-id` | Azure AD app GUID | `OUTLOOK_APPLICATION_CLIENT_ID` | +| `--tenant-id` | Azure AD tenant ID | `OUTLOOK_TENANT_ID` | +| `--max-results` | Max messages to download (default: 50) | — | +| `--output-dir` | Output directory (default: `mail_dump`) | — | +| `--filter` | OData `$filter` expression | — | +| `--search` | KQL `$search` query | — | +| `--device-code` | Use device-code auth flow | — | +| `--check-permissions` | Verify Mail.Read access only | — | +| `--setup-permissions` | Add Mail.Read to app registration | — | + +## Mbox (`mbox_dump.py`) + +Extracts emails from a local or remote `.mbox` file into individual `.eml` +files (numbered `1.eml`, `2.eml`, …). + +### Prerequisites + +- Python 3.12+ +- An `.mbox` file (local or accessible via URL) + +No API keys, OAuth credentials, or cloud projects are required. 
+
+### How to obtain an mbox file
+
+| Source | How |
+|---|---|
+| **Gmail** | Google Takeout → select **Mail** → export as `.mbox` |
+| **Thunderbird** | ImportExportTools NG add-on → right-click folder → Export as mbox |
+| **Apple Mail** | Mailbox → Export Mailbox… |
+| **Mailing list archives** | Many lists (e.g. Mailman) offer `.mbox` downloads |
+
+### Usage
+
+```bash
+# Extract emails from a local mbox file
+# Writes numbered .eml files to mail_dump/ (see --output-dir)
+python tools/mail/mbox_dump.py mailbox.mbox
+
+# Download an mbox from a URL, then extract
+python tools/mail/mbox_dump.py --url https://example.com/archive.mbox
+
+# Download and save with a custom local filename
+python tools/mail/mbox_dump.py --url https://example.com/archive.mbox --mbox-file local.mbox
+
+# Download from URL and also specify the mbox path to extract
+python tools/mail/mbox_dump.py archive.mbox --url https://example.com/archive.mbox
+```
+
+### Command-line flags
+
+| Flag | Description | Default |
+|---|---|---|
+| `mbox` (positional) | Path to the local `.mbox` file to extract | _(required unless `--url` is used)_ |
+| `--url` | URL to download an `.mbox` file from | — |
+| `--mbox-file` | Local filename for the downloaded mbox (used with `--url`) | filename from the URL |
+
+> **Note:** Extracted emails are written to the `--output-dir` directory
+> (default `mail_dump/`), numbered sequentially as `000001.eml`,
+> `000002.eml`, etc.
diff --git a/tools/gmail/gmail_dump.py b/tools/mail/gmail_dump.py
similarity index 100%
rename from tools/gmail/gmail_dump.py
rename to tools/mail/gmail_dump.py
diff --git a/tools/mail/mbox_dump.py b/tools/mail/mbox_dump.py
new file mode 100644
index 00000000..7ec73374
--- /dev/null
+++ b/tools/mail/mbox_dump.py
@@ -0,0 +1,130 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+"""Mbox Dump Tool
+
+Extract emails from an mbox file into individual .eml files.
+Writes each email in the mbox as a separate, sequentially numbered .eml file
+into the output directory (default: mail_dump/; override with --output-dir).
+
+Usage:
+    python tools/mail/mbox_dump.py mailbox.mbox
+    python tools/mail/mbox_dump.py mailbox.mbox --output-dir ./emails
+    python tools/mail/mbox_dump.py --url https://example.com/archive.mbox
+    python tools/mail/mbox_dump.py --url https://example.com/archive.mbox --mbox-dir /tmp
+    python tools/mail/mbox_dump.py --url https://example.com/archive.mbox --mbox-file local.mbox
+"""
+
+import argparse
+import mailbox
+from pathlib import Path
+import sys
+from urllib.parse import urlparse
+import urllib.request
+
+
+def dump_mbox(mbox_path: str, output_dir: str | None = None) -> int:
+    """Extract emails from an mbox file into individual .eml files.
+
+    Args:
+        mbox_path: Path to the mbox file.
+        output_dir: Directory to write .eml files to. If None, a directory
+            with the same name as the mbox file (without extension) is created
+            alongside the mbox file.
+
+    Returns:
+        The number of emails extracted.
+    """
+    mbox_file = Path(mbox_path)
+    if not mbox_file.exists():
+        print(f"Error: mbox file not found: {mbox_file}", file=sys.stderr)
+        sys.exit(1)
+
+    if output_dir is None:
+        out_path = mbox_file.parent / mbox_file.stem
+    else:
+        out_path = Path(output_dir)
+
+    out_path.mkdir(parents=True, exist_ok=True)
+
+    mbox = mailbox.mbox(mbox_path)
+    count = 0
+    for i, message in enumerate(mbox):
+        eml_path = out_path / f"{i + 1:06d}.eml"
+        eml_path.write_bytes(message.as_bytes())
+        count += 1
+
+    return count
+
+
+def download_mbox(url: str, output_path: str) -> str:
+    """Download an mbox file from a URL.
+
+    Args:
+        url: URL to download the mbox from.
+        output_path: Path to save the downloaded mbox file.
+
+    Returns:
+        The path to the downloaded file.
+ """ + print(f"Downloading {url}...") + urllib.request.urlretrieve(url, output_path) + print(f"Saved to {output_path}") + return output_path + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Extract emails from an mbox file into individual .eml files", + ) + parser.add_argument( + "mbox", + nargs="?", + help="Path to the mbox file to extract", + ) + parser.add_argument( + "--url", + help="URL to download an mbox file from", + ) + parser.add_argument( + "--mbox-dir", + default=".", + help="Directory to store the downloaded mbox file (default: current directory)", + ) + parser.add_argument( + "--mbox-file", + default=None, + help="Filename for the downloaded mbox file (default: filename from the URL)", + ) + parser.add_argument( + "--output-dir", + default="mail_dump", + help="Output directory for .eml files (default: mail_dump)", + ) + args = parser.parse_args() + + if args.url: + if args.mbox_file: + filename = args.mbox_file + else: + url_path = urlparse(args.url).path + filename = Path(url_path).name or "downloaded.mbox" + mbox_path = str(Path(args.mbox_dir) / filename) + download_mbox(args.url, mbox_path) + if args.mbox is None: + args.mbox = mbox_path + + if args.mbox is None: + parser.error("either provide an mbox file path or use --url to download one") + + count = dump_mbox(args.mbox, output_dir=args.output_dir) + out_dir = ( + args.output_dir + if args.output_dir + else str(Path(args.mbox).parent / Path(args.mbox).stem) + ) + print(f"Extracted {count} emails to {out_dir}/") + + +if __name__ == "__main__": + main() diff --git a/tools/mail/mbox_to_emls.py b/tools/mail/mbox_to_emls.py new file mode 100644 index 00000000..54f64711 --- /dev/null +++ b/tools/mail/mbox_to_emls.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python3 +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +""" +Convert an mbox file into a directory of individual .eml files. 
+ +Usage: + python tools/mbox_to_emls.py mailbox.mbox output_dir/ +""" + +import argparse +from email.utils import parsedate_to_datetime +import mailbox +import os +import sys + + +def make_filename(date_header: str | None, used_names: set[str]) -> str: + """ + Generate a unique filename from the Date header. + + Format: YYYYMMDD_HHMMSS.eml, with _NNN suffix for disambiguation. + Falls back to 'unknown_NNNNNN' if no valid date. + """ + base = "unknown" + if date_header: + try: + dt = parsedate_to_datetime(date_header) + base = dt.strftime("%Y%m%d_%H%M%S") + except (ValueError, TypeError): + pass + + # Find a unique name + if base == "unknown": + # For unknown dates, use a 6-digit serial + serial = 0 + while True: + name = f"{base}_{serial:06d}.eml" + if name not in used_names: + used_names.add(name) + return name + serial += 1 + else: + # Try without suffix first + name = f"{base}.eml" + if name not in used_names: + used_names.add(name) + return name + # Add serial suffix for duplicates + serial = 1 + while True: + name = f"{base}_{serial:03d}.eml" + if name not in used_names: + used_names.add(name) + return name + serial += 1 + + +def main() -> int: + parser = argparse.ArgumentParser( + description="Convert an mbox file to a directory of .eml files." 
+ ) + parser.add_argument("input", help="Path to the input .mbox file") + parser.add_argument("output", help="Path to the output directory") + args = parser.parse_args() + + mbox_path = args.input + output_dir = args.output + + if not os.path.isfile(mbox_path): + print(f"Error: Input file not found: {mbox_path}", file=sys.stderr) + return 1 + + os.makedirs(output_dir, exist_ok=True) + + mbox = mailbox.mbox(mbox_path) + used_names: set[str] = set() + count = 0 + + for message in mbox: + date_header = message.get("Date") + filename = make_filename(date_header, used_names) + filepath = os.path.join(output_dir, filename) + + with open(filepath, "wb") as f: + f.write(message.as_bytes()) + + count += 1 + if count % 100 == 0: + print(f"Processed {count} messages...", file=sys.stderr) + + print(f"Wrote {count} .eml files to {output_dir}", file=sys.stderr) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/tools/mail/outlook_dump.py b/tools/mail/outlook_dump.py new file mode 100644 index 00000000..5fdb165e --- /dev/null +++ b/tools/mail/outlook_dump.py @@ -0,0 +1,649 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Download Outlook emails as .eml files via Microsoft Graph API. + +Requires an Azure AD app registration with Mail.Read delegated permission. +Uses ``msgraph-sdk`` and ``azure-identity`` for authentication. 
+ +Usage examples:: + + # Download 50 most recent messages (interactive browser auth) + python tools/mail/outlook_dump.py + + # Download with explicit login and app IDs + python tools/mail/outlook_dump.py --client-id user@example.com \ + --application-client-id 6876366c-2635-4058-ae8a-cfbe152fbd4c + + # Download 200 messages using device-code auth + python tools/mail/outlook_dump.py --max-results 200 --device-code + + # Filter messages by sender + python tools/mail/outlook_dump.py \ + --filter "from/emailAddress/address eq 'alice@example.com'" + + # Full-text search (KQL) + python tools/mail/outlook_dump.py --search "subject:quarterly report" + + # Check permissions only + python tools/mail/outlook_dump.py --check-app-reg-permissions + + # Add Mail.Read to the app registration (requires admin) + python tools/mail/outlook_dump.py --setup-permissions +""" + +import argparse +import asyncio +import os +from pathlib import Path +import re +import time +from uuid import UUID + +from azure.identity import ( + DefaultAzureCredential, + DeviceCodeCredential, + InteractiveBrowserCredential, +) +from kiota_abstractions.base_request_configuration import RequestConfiguration +from msgraph.generated.applications.applications_request_builder import ( + ApplicationsRequestBuilder, +) +from msgraph.generated.models.application import Application +from msgraph.generated.models.o_data_errors.o_data_error import ODataError +from msgraph.generated.models.required_resource_access import RequiredResourceAccess +from msgraph.generated.models.resource_access import ResourceAccess +from msgraph.generated.users.item.messages.messages_request_builder import ( # type: ignore[import-not-found] + MessagesRequestBuilder, +) +from msgraph.graph_service_client import GraphServiceClient + +# Delegated scopes requested at sign-in +REQUIRED_SCOPES = ["Mail.Read"] + +# Required delegated permissions to check with --check-app-reg-permissions +REQUIRED_DELEGATED_PERMISSIONS = [ + "Mail.Read", + 
"User.Read", +] + +# Default output directory +OUT = Path("mail_dump") + +# Well-known Microsoft Graph application ID +GRAPH_APP_ID = "00000003-0000-0000-c000-000000000000" +# Mail.Read *delegated* permission GUID +MAIL_READ_SCOPE_ID = "570282fd-fa5c-430d-a7fd-fc8dc98a9dca" + +type Credential = DefaultAzureCredential | InteractiveBrowserCredential | DeviceCodeCredential + + +# --------------------------------------------------------------------------- +# Authentication +# --------------------------------------------------------------------------- + + +def get_credential( + application_client_id: str | None, + tenant_id: str | None, + use_device_code: bool, + login_hint: str | None = None, +) -> Credential: + """Create an Azure credential for authentication. + + When *application_client_id* is provided, uses interactive browser or + device-code authentication against that specific app registration. + + When *application_client_id* is ``None``, falls back to + ``DefaultAzureCredential`` which tries Azure CLI, VS Code, managed + identity, and other ambient credentials automatically. + """ + if not application_client_id: + print("No --application-client-id specified; using DefaultAzureCredential.") + return DefaultAzureCredential() + if use_device_code: + return DeviceCodeCredential( + client_id=application_client_id, tenant_id=tenant_id + ) + return InteractiveBrowserCredential( + client_id=application_client_id, + tenant_id=tenant_id, + login_hint=login_hint, + ) + + +# --------------------------------------------------------------------------- +# Permission helpers +# --------------------------------------------------------------------------- + + +async def check_permissions(client: GraphServiceClient) -> bool: + """Verify that the authenticated user has Mail.Read access. + + Makes a minimal ``GET /me/messages?$top=1&$select=id`` call. + Returns ``True`` when the call succeeds, ``False`` otherwise. 
+ """ + query_params = MessagesRequestBuilder.MessagesRequestBuilderGetQueryParameters( + top=1, + select=["id"], + ) + config = RequestConfiguration(query_parameters=query_params) + try: + await client.me.messages.get(request_configuration=config) + except ODataError as e: + code = e.error.code if e.error else "Unknown" + message = e.error.message if e.error else str(e) + print(f"Permission check failed ({code}): {message}") + print("Ensure the app has Mail.Read and the user has consented.") + return False + print("Mail.Read permission verified successfully.") + return True + + +async def check_app_registration_permissions( + credential: Credential, application_client_id: str | None +) -> bool: + """Check whether Mail.Read is present in the token's granted scopes. + + Acquires an access token for the Microsoft Graph ``Mail.Read`` scope and + decodes the JWT (without cryptographic verification) to inspect: + + * ``scp`` – delegated scopes actually granted to the token. + * ``appid`` / ``azp`` – confirms the correct application client ID. + * ``tid`` – tenant the token was issued for. + + No admin permissions are required; the token is obtained with the same + credential used for all other operations. Returns ``True`` when + ``Mail.Read`` appears in the granted scopes, ``False`` otherwise. 
+ """ + import base64 + import json + + if not application_client_id: + print("Cannot inspect permissions without --application-client-id.") + return False + + # Acquire a token for the Graph Mail.Read scope + try: + token = credential.get_token("https://graph.microsoft.com/Mail.Read") + except Exception as e: + print(f"Failed to acquire token: {e}") + print("Ensure the app registration has Mail.Read configured and consented.") + return False + + # Decode the JWT payload (no signature verification needed here) + parts = token.token.split(".") + if len(parts) < 2: + print("Access token is not a valid JWT.") + return False + + payload_b64 = parts[1] + # Fix base64 padding + payload_b64 += "=" * (-len(payload_b64) % 4) + try: + payload = json.loads(base64.urlsafe_b64decode(payload_b64)) + except (json.JSONDecodeError, ValueError) as e: + print(f"Failed to decode token payload: {e}") + return False + + # Display token metadata + app_id = payload.get("appid") or payload.get("azp") or "unknown" + tenant_id = payload.get("tid", "unknown") + upn = payload.get("upn") or payload.get("preferred_username") or "unknown" + print(f"Token info:") + print(f" Application client ID: {app_id}") + print(f" Tenant ID: {tenant_id}") + print(f" User: {upn}") + + if app_id != application_client_id: + print( + f" WARNING: token appid '{app_id}' does not match" + f" --application-client-id '{application_client_id}'" + ) + + # Check granted scopes + granted_scopes = set((payload.get("scp") or "").split()) + print(f" Granted scopes: {' '.join(sorted(granted_scopes)) or '(none)'}") + + all_ok = True + for scope in REQUIRED_DELEGATED_PERMISSIONS: + if scope in granted_scopes: + print(f" {scope}: GRANTED") + else: + print(f" {scope}: NOT GRANTED") + all_ok = False + + if not all_ok: + missing = sorted(set(REQUIRED_DELEGATED_PERMISSIONS) - granted_scopes) + print(f"\n Missing permissions: {', '.join(missing)}") + print(" The permissions may not be configured or admin consent is required.") + 
print(" Use --setup-permissions to add them, or visit:") + print( + f" https://login.microsoftonline.com/common/adminconsent" + f"?client_id={application_client_id}" + ) + + # ------------------------------------------------------------------ + # Check redirect URI configuration on the app registration + # ------------------------------------------------------------------ + redirect_ok = await _check_redirect_uri(credential, application_client_id) + if not redirect_ok: + all_ok = False + + return all_ok + + +async def _check_redirect_uri( + credential: Credential, application_client_id: str +) -> bool: + """Verify the app registration has http://localhost as a public client redirect URI. + + Queries ``GET /applications?$filter=appId eq '...'`` and inspects + ``publicClient.redirectUris``. Requires ``Application.Read.All`` or + ownership of the app registration. If the query fails due to + insufficient permissions, prints a warning and returns ``True`` + (optimistic — the caller cannot determine the answer). 
+ """ + EXPECTED_REDIRECT_URI = "http://localhost" + + client = GraphServiceClient(credential, ["Application.Read.All"]) + + app_params = ( + ApplicationsRequestBuilder.ApplicationsRequestBuilderGetQueryParameters( + filter=f"appId eq '{application_client_id}'", + select=["id", "appId", "displayName", "publicClient", "web", "spa"], + ) + ) + app_config = RequestConfiguration(query_parameters=app_params) + + try: + apps_response = await client.applications.get(request_configuration=app_config) + except ODataError as e: + code = e.error.code if e.error else "Unknown" + message = e.error.message if e.error else str(e) + print(f"\nRedirect URI check skipped ({code}): {message}") + print(" This check requires Application.Read.All or app ownership.") + return True # optimistic — cannot verify + + if not apps_response or not apps_response.value: + print( + f"\nRedirect URI check: app registration not found ({application_client_id})" + ) + return False + + app = apps_response.value[0] + + # "Mobile and desktop applications" → publicClient.redirectUris + public_uris: list[str] = [] + if app.public_client and app.public_client.redirect_uris: + public_uris = list(app.public_client.redirect_uris) + + # Also show web and SPA redirect URIs for context + web_uris: list[str] = [] + if app.web and app.web.redirect_uris: + web_uris = list(app.web.redirect_uris) + + spa_uris: list[str] = [] + if app.spa and app.spa.redirect_uris: + spa_uris = list(app.spa.redirect_uris) + + print("\nRedirect URI configuration:") + if public_uris: + print(f" Mobile and desktop (publicClient): {', '.join(public_uris)}") + else: + print(" Mobile and desktop (publicClient): (none)") + if web_uris: + print(f" Web: {', '.join(web_uris)}") + if spa_uris: + print(f" SPA: {', '.join(spa_uris)}") + + if EXPECTED_REDIRECT_URI in public_uris: + print(f" {EXPECTED_REDIRECT_URI} in publicClient: OK") + return True + + print(f" {EXPECTED_REDIRECT_URI} in publicClient: NOT FOUND") + print(" To fix, go to Azure 
Portal > Microsoft Entra ID > App registrations") + print(f" > {application_client_id} > Authentication") + print(" > Add a platform > Mobile and desktop applications") + print(f" > check '{EXPECTED_REDIRECT_URI}' > Save") + return False + + +async def setup_permissions( + credential: Credential, application_client_id: str | None +) -> bool: + """Add the Mail.Read delegated permission to an app registration. + + Requires ``Application.ReadWrite.All`` or the Global Administrator role. + Returns ``True`` when the permission is already present or was added + successfully, ``False`` on failure. + """ + if not application_client_id: + print("--application-client-id is required for --setup-permissions.") + return False + + admin_scopes = ["Application.ReadWrite.All"] + admin_client = GraphServiceClient(credential, admin_scopes) + + try: + # Locate the application object by its client (app) ID + query_params = ( + ApplicationsRequestBuilder.ApplicationsRequestBuilderGetQueryParameters( + filter=f"appId eq '{application_client_id}'", + ) + ) + config = RequestConfiguration(query_parameters=query_params) + apps_response = await admin_client.applications.get( + request_configuration=config + ) + + if not apps_response or not apps_response.value: + print( + f"No application found with application_client_id: {application_client_id}" + ) + _print_manual_setup_instructions(application_client_id) + return False + + app = apps_response.value[0] + app_object_id = app.id + if not app_object_id: + print("Application object has no ID.") + return False + existing_access = list(app.required_resource_access or []) + + # Find or create the Microsoft Graph resource entry + graph_resource: RequiredResourceAccess | None = None + for resource in existing_access: + if resource.resource_app_id == GRAPH_APP_ID: + graph_resource = resource + break + + # Check whether Mail.Read is already configured + if graph_resource: + for access in graph_resource.resource_access or []: + if str(access.id) == 
MAIL_READ_SCOPE_ID: + print("Mail.Read permission is already configured.") + return True + + # Build the new permission entry + mail_read = ResourceAccess() + mail_read.id = UUID(MAIL_READ_SCOPE_ID) + mail_read.type = "Scope" + + if graph_resource: + graph_resource.resource_access = list( + graph_resource.resource_access or [] + ) + [mail_read] + else: + graph_resource = RequiredResourceAccess() + graph_resource.resource_app_id = GRAPH_APP_ID + graph_resource.resource_access = [mail_read] + existing_access.append(graph_resource) + + # Patch the application + update_body = Application() + update_body.required_resource_access = existing_access + await admin_client.applications.by_application_id(app_object_id).patch( + update_body + ) + + print("Mail.Read permission added to app registration.") + print("Admin consent may still be required. Visit:") + print( + f" https://login.microsoftonline.com/common/adminconsent" + f"?client_id={application_client_id}" + ) + return True + + except ODataError as e: + code = e.error.code if e.error else "Unknown" + message = e.error.message if e.error else str(e) + print(f"Failed to set up permissions ({code}): {message}") + _print_manual_setup_instructions(application_client_id) + return False + + +def _print_manual_setup_instructions(client_id: str) -> None: + """Print step-by-step portal instructions for adding Mail.Read.""" + print() + print("Manual setup instructions:") + print( + " 1. Go to https://portal.azure.com" + " > Microsoft Entra ID > App registrations" + ) + print(f" 2. Find or create an app with client ID: {client_id}") + print( + " 3. Authentication > Add a platform > Mobile and desktop applications" + " > check 'http://localhost' > Save" + ) + print( + " 4. API permissions > Add a permission" + " > Microsoft Graph > Delegated permissions" + ) + print(" 5. Search for 'Mail.Read' and add it") + print(" 6. 
Click 'Grant admin consent' if required by your organization") + print() + print(" Alternatively, use --device-code to skip redirect URI setup.") + + +# --------------------------------------------------------------------------- +# Message download +# --------------------------------------------------------------------------- + + +async def download_messages( + client: GraphServiceClient, + output_dir: Path, + max_results: int, + filter_query: str, + search_query: str, +) -> int: + """Download messages from the signed-in user's mailbox as ``.eml`` files. + + Messages are saved as ``000001.eml``, ``000002.eml``, … in *output_dir*. + Returns the number of messages successfully downloaded. + """ + output_dir.mkdir(parents=True, exist_ok=True) + + query_params = MessagesRequestBuilder.MessagesRequestBuilderGetQueryParameters( + top=min(max_results, 100), + select=["id", "subject", "receivedDateTime"], + orderby=["receivedDateTime desc"], + ) + if filter_query: + query_params.filter = filter_query + + config = RequestConfiguration(query_parameters=query_params) + + # KQL $search requires the eventual-consistency header + if search_query: + query_params.search = f'"{search_query}"' + config.headers.add("ConsistencyLevel", "eventual") + + count = 0 + response = await client.me.messages.get(request_configuration=config) + + while response and response.value: + for msg in response.value: + if count >= max_results: + return count + + if not msg.id: + continue + # GET /me/messages/{id}/$value → MIME content + mime_content = await client.me.messages.by_message_id(msg.id).content.get() + if mime_content is None: + print(f" [skip] empty MIME for message {msg.id}") + continue + + eml_path = output_dir / f"{count + 1:06d}.eml" + if isinstance(mime_content, bytes): + eml_path.write_bytes(mime_content) + else: + # Some SDK versions return a stream-like object + eml_path.write_bytes(mime_content.read()) + count += 1 + + subject = msg.subject or "(no subject)" + print(f" [{count}] 
{subject}") + + # Follow @odata.nextLink for the next page of results + if count < max_results and response.odata_next_link: + response = await client.me.messages.with_url(response.odata_next_link).get() + else: + break + + return count + + +# --------------------------------------------------------------------------- +# CLI +# --------------------------------------------------------------------------- + + +async def async_main(args: argparse.Namespace) -> None: + """Async entry point – routes to the requested action.""" + credential = get_credential( + args.application_client_id, + args.tenant_id, + args.device_code, + login_hint=args.client_id, + ) + client = GraphServiceClient(credential, REQUIRED_SCOPES) + + if args.check_app_reg_permissions: + ok = await check_permissions(client) + await check_app_registration_permissions(credential, args.application_client_id) + if not ok: + return + return + + if args.setup_permissions: + await setup_permissions(credential, args.application_client_id) + return + + # Default action: download messages (verify permissions first) + if not await check_permissions(client): + return + + print(f"Downloading up to {args.max_results} messages …") + start_time = time.time() + count = await download_messages( + client, args.output_dir, args.max_results, args.filter, args.search + ) + elapsed = time.time() - start_time + print(f"Downloaded {count} messages to {args.output_dir} in {elapsed:.1f}s") + + +def main() -> None: + """CLI entry point for Outlook mail dump.""" + from dotenv import load_dotenv + + load_dotenv() + + parser = argparse.ArgumentParser( + description="Download Outlook messages as .eml files via Microsoft Graph API", + ) + parser.add_argument( + "--client-id", + type=str, + default=os.environ.get("OUTLOOK_CLIENT_ID"), + help=("User email address for login / login_hint " "(env: OUTLOOK_CLIENT_ID)"), + ) + parser.add_argument( + "--application-client-id", + type=str, + 
default=os.environ.get("OUTLOOK_APPLICATION_CLIENT_ID"), + help=( + "Azure AD app registration client ID / GUID " + "(env: OUTLOOK_APPLICATION_CLIENT_ID)" + ), + ) + parser.add_argument( + "--tenant-id", + type=str, + default=os.environ.get("OUTLOOK_TENANT_ID", "common"), + help="Azure AD tenant ID (env: OUTLOOK_TENANT_ID, default: 'common')", + ) + parser.add_argument( + "--max-results", + type=int, + default=50, + help="Maximum number of messages to download (default: 50)", + ) + parser.add_argument( + "--output-dir", + type=Path, + default=OUT, + help="Output directory for .eml files (default: mail_dump)", + ) + parser.add_argument( + "--filter", + type=str, + default="", + help=( + "OData $filter expression " + "(e.g. \"from/emailAddress/address eq 'user@example.com'\")" + ), + ) + parser.add_argument( + "--search", + type=str, + default="", + help="KQL $search query (e.g. 'subject:quarterly report')", + ) + parser.add_argument( + "--device-code", + action="store_true", + help="Use device-code flow instead of interactive browser auth", + ) + parser.add_argument( + "--check-app-reg-permissions", + action="store_true", + help="Only verify that required Graph API permissions are available", + ) + parser.add_argument( + "--setup-permissions", + action="store_true", + help=( + "Add required permissions to the app registration " + "(requires Application.ReadWrite.All or Global Admin)" + ), + ) + args = parser.parse_args() + + # Validate --client-id format when provided + if args.client_id: + email_pattern = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$") + if not email_pattern.match(args.client_id): + parser.error( + f"--client-id must be an email address (e.g. 
'user@example.com')," + f" got: '{args.client_id}'" + ) + + # Validate --application-client-id format when provided + if args.application_client_id: + uuid_pattern = re.compile( + r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", + re.IGNORECASE, + ) + if not uuid_pattern.match(args.application_client_id): + parser.error( + f"--application-client-id must be a GUID " + f"(e.g. 'a1b2c3d4-e5f6-7890-abcd-ef1234567890')," + f" got: '{args.application_client_id}'\n" + "Find it in Azure Portal > Microsoft Entra ID > App registrations" + " > your app > Application (client) ID" + ) + + if not args.application_client_id: + print( + "No --application-client-id or OUTLOOK_APPLICATION_CLIENT_ID set;" + " falling back to DefaultAzureCredential." + ) + + asyncio.run(async_main(args)) + + +if __name__ == "__main__": + main() diff --git a/tools/mail/pt.py b/tools/mail/pt.py new file mode 100644 index 00000000..2865d86b --- /dev/null +++ b/tools/mail/pt.py @@ -0,0 +1,25 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Print the Date header of .eml files as a hex POSIX timestamp.""" + +import email +import email.utils +import sys + + +def main() -> None: + for path in sys.argv[1:]: + with open(path, "rb") as f: + msg = email.message_from_binary_file(f) + date_str = msg["Date"] or "" + parsed = email.utils.parsedate_tz(date_str) + if parsed is None: + hex_ts = "????????" 
+ else: + hex_ts = f"{int(email.utils.mktime_tz(parsed)) * 1000 << 20:016x}" + print(f"{path}\t{date_str:<40}\t{hex_ts}") + + +if __name__ == "__main__": + main() diff --git a/tools/query.py b/tools/query.py index cb597cd5..06c1331f 100644 --- a/tools/query.py +++ b/tools/query.py @@ -20,6 +20,7 @@ from colorama import Fore from colorama import init as colorama_init +from dotenv import load_dotenv import numpy as np readline = None @@ -31,18 +32,19 @@ import typechat -from typeagent.aitools import embeddings, utils +from typeagent.aitools import embeddings, model_adapters, utils from typeagent.knowpro import ( answer_response_schema, answers, - convknowledge, - kplib, +) +from typeagent.knowpro import ( query, search, search_query_schema, searchlang, serialization, ) +from typeagent.knowpro import knowledge_schema as kplib from typeagent.knowpro.convsettings import ConversationSettings from typeagent.knowpro.interfaces import ( IConversation, @@ -149,7 +151,7 @@ class ProcessingContext: debug2: typing.Literal["none", "diff", "full", "skip"] debug3: typing.Literal["none", "diff", "full", "nice"] debug4: typing.Literal["none", "diff", "full", "nice"] - embedding_model: embeddings.AsyncEmbeddingModel + embedding_model: embeddings.IEmbeddingModel query_translator: typechat.TypeChatJsonTranslator[search_query_schema.SearchQuery] answer_translator: typechat.TypeChatJsonTranslator[ answer_response_schema.AnswerResponse @@ -392,6 +394,7 @@ async def cmd_stage(context: ProcessingContext, args: list[str]) -> None: if last_stage < 4: return + context.answer_context_options.debug = context.debug4 == "full" all_answers, combined_answer = await answers.generate_answers( context.answer_translator, search_results, @@ -528,7 +531,7 @@ async def handle_at_command(context: ProcessingContext, line: str) -> None: async def main(): - utils.load_dotenv() + load_dotenv() colorama_init(autoreset=True) parser = make_arg_parser("TypeAgent Query Tool") @@ -575,7 +578,7 @@ async def main(): 
"Error: non-empty --search-results required for batch mode." ) - model = convknowledge.create_typechat_model() + model = model_adapters.create_chat_model() query_translator = utils.create_translator(model, search_query_schema.SearchQuery) if args.alt_schema: if args.verbose: @@ -610,7 +613,11 @@ async def main(): max_message_matches=25, ), answers.AnswerContextOptions( - entities_top_k=50, topics_top_k=50, messages_top_k=None, chunking=None + entities_top_k=50, + topics_top_k=50, + messages_top_k=None, + chunking=None, + debug=args.debug4 == "full", ), ConversationHistory(max_entries=args.history_size), ) diff --git a/tools/release.py b/tools/release.py index cc09bbdc..50bb95e8 100755 --- a/tools/release.py +++ b/tools/release.py @@ -282,14 +282,25 @@ def main(): # Check that uv is available if not check_uv_available(): - print("Error: 'uv' command not found. Please install uv first.", file=sys.stderr) - print(" Install with: curl -LsSf https://astral.sh/uv/install.sh | sh", file=sys.stderr) + print( + "Error: 'uv' command not found. Please install uv first.", file=sys.stderr + ) + print( + " Install with: curl -LsSf https://astral.sh/uv/install.sh | sh", + file=sys.stderr, + ) return 1 # Check that gh CLI is available if not check_gh_available(): - print("Error: 'gh' CLI not found. Please install GitHub CLI first.", file=sys.stderr) - print(" Install with: brew install gh (or see https://cli.github.com/)", file=sys.stderr) + print( + "Error: 'gh' CLI not found. 
Please install GitHub CLI first.", + file=sys.stderr, + ) + print( + " Install with: brew install gh (or see https://cli.github.com/)", + file=sys.stderr, + ) return 1 pyproject_path = current_dir / "pyproject.toml" @@ -297,9 +308,7 @@ def main(): # Check git status (unless --force) if not check_git_status(): if args.force: - print( - "Warning: Git working directory is not clean (forced)", - ) + print("Warning: Git working directory is not clean (forced)") else: print( "Error: Git working directory is not clean. Please commit or stash changes first.", @@ -364,7 +373,9 @@ def main(): return 1 # Git commit for release version - exit_code, _ = run_command(["git", "add", "pyproject.toml", "uv.lock"], args.dry_run) + exit_code, _ = run_command( + ["git", "add", "pyproject.toml", "uv.lock"], args.dry_run + ) if exit_code != 0: print("Error: Failed to stage pyproject.toml and uv.lock", file=sys.stderr) @@ -396,21 +407,27 @@ def main(): return 1 # Git commit for post-release version - exit_code, _ = run_command(["git", "add", "pyproject.toml", "uv.lock"], args.dry_run) + exit_code, _ = run_command( + ["git", "add", "pyproject.toml", "uv.lock"], args.dry_run + ) if exit_code != 0: print("Error: Failed to stage pyproject.toml and uv.lock", file=sys.stderr) return 1 post_commit_message = f"Bump version to {post_release_version} for development" - exit_code, _ = run_command(["git", "commit", "-m", post_commit_message], args.dry_run) + exit_code, _ = run_command( + ["git", "commit", "-m", post_commit_message], args.dry_run + ) if exit_code != 0: print("Error: Failed to commit post-release changes", file=sys.stderr) return 1 # Push branch and tag - exit_code, _ = run_command(["git", "push", "-u", "origin", branch_name], args.dry_run) + exit_code, _ = run_command( + ["git", "push", "-u", "origin", branch_name], args.dry_run + ) if exit_code != 0: print(f"Error: Failed to push branch {branch_name}", file=sys.stderr) @@ -426,13 +443,21 @@ def main(): pr_title = f"Release 
{new_version}" pr_body = f"## Release {new_version}\\n\\nThis PR contains:\\n- Version bump to {new_version}\\n- Tag {tag_name}\\n- Post-release version bump to {post_release_version}" exit_code, pr_url = run_command( - ["gh", "pr", "create", "--title", pr_title, "--body", pr_body], - args.dry_run + ["gh", "pr", "create", "--title", pr_title, "--body", pr_body], args.dry_run ) if exit_code != 0: - print("Error: Failed to create PR", file=sys.stderr) - return 1 + if args.force: + print( + "Warning: Failed to create PR -- you can create it yourself", + file=sys.stderr, + ) + else: + print( + "Error: Failed to create PR -- but you can create it yourself", + file=sys.stderr, + ) + return 1 if args.dry_run: print(f"\n[DRY RUN] Release process completed successfully!") @@ -443,8 +468,11 @@ def main(): print(f"\nRelease process completed successfully!") print(f"Created branch: {branch_name}") print(f"Created tag: {tag_name}") - print(f"Created PR: {pr_url}") + if exit_code == 0: + print(f"Created PR: {pr_url}") print(f"\nNext steps:") + if exit_code == 0: + print(f" 0. Create the PR (since it wasn't created)") print(f" 1. Get the PR approved and merged") print(f" 2. The GitHub Actions release workflow will be triggered by the tag") diff --git a/tools/test_email.py b/tools/test_email.py deleted file mode 100644 index a34a2648..00000000 --- a/tools/test_email.py +++ /dev/null @@ -1,527 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT License. 
- -import argparse -import asyncio -import os -from pathlib import Path -import shelve -import shlex -import sys -import traceback -from typing import Any, Awaitable, Callable, Iterable, Literal - -from colorama import Fore - -try: - import readline # type: ignore -except ImportError: - pass # readline not available on Windows - -from query import print_result - -import typechat - -from typeagent.aitools import utils -from typeagent.emails.email_import import import_email_from_file, import_emails_from_dir -from typeagent.emails.email_memory import EmailMemory -from typeagent.emails.email_message import EmailMessage -from typeagent.knowpro import convknowledge, kplib, search_query_schema, searchlang -from typeagent.knowpro.convsettings import ConversationSettings -from typeagent.knowpro.interfaces import IConversation -from typeagent.storage.utils import create_storage_provider - - -class ReallyExit(Exception): - pass - - -class EmailContext: - def __init__( - self, base_path: Path, db_name: str, conversation: EmailMemory - ) -> None: - self.base_path = base_path - self.db_name = db_name - self.db_path = base_path.joinpath(db_name) - self.conversation = conversation - self.query_translator: ( - typechat.TypeChatJsonTranslator[search_query_schema.SearchQuery] | None - ) = None - self.index_log = load_index_log(str(self.db_path), create_new=False) - - def get_translator(self): - if self.query_translator is None: - model = convknowledge.create_typechat_model() - self.query_translator = utils.create_translator( - model, search_query_schema.SearchQuery - ) - return self.query_translator - - async def load_conversation(self, db_name: str, create_new: bool = False): - await self.conversation.settings.storage_provider.close() - self.db_name = db_name - self.db_path = self.base_path.joinpath(db_name) - self.conversation = await load_or_create_email_index( - str(self.db_path), create_new - ) - self.index_log = load_index_log(str(self.db_path), create_new) - - # Delete the 
current conversation and re-create it - async def restart_conversation(self): - await self.load_conversation(self.db_name, create_new=True) - - def is_indexed(self, email_id: str | None) -> bool: - return bool(email_id and self.index_log.get(email_id)) - - def log_indexed(self, email_id: str | None) -> None: - if email_id is not None: - self.index_log[email_id] = True - - -CommandHandler = Callable[[EmailContext, list[str]], Awaitable[None]] - - -# Command decorator -def command(parser: argparse.ArgumentParser): - def decorator(func: Callable): - func.parser = parser # type: ignore - return func - - return decorator - - -async def main(): - - if sys.argv[1:2]: - base_path = Path(sys.argv[1]) - elif os.path.exists("/data"): - base_path = Path("/data/testChat/knowpro/email/") - else: - base_path = Path(".") - - try: - base_path.mkdir(parents=True, exist_ok=True) - except PermissionError as e: - print(e) - sys.exit(1) - - utils.load_dotenv() - - print("Email Memory Demo") - - default_db = "gmail.db" # "pyEmails.db" - db_path = str(base_path.joinpath(default_db)) - context = EmailContext( - base_path, - default_db, - conversation=await load_or_create_email_index(db_path, create_new=False), - ) - print(f"Using email memory at: {db_path}") - await print_conversation_stats(context.conversation) - - # Command handlers - cmd_handlers: dict[str, CommandHandler] = { - "@exit": exit_app, - "@quit": exit_app, - "@add_messages": add_messages, # Add messages - "@parse_messages": parse_messages, - "@load_index": load_index, - "@reset_index": reset_index, # Delete index and start over - "@search": search_index, # Search index - "@answer": generate_answer, # Question answer - } - - async def default_handler(context, line): - return await generate_answer(context, [line]) - - print("Type @help for a list of commands") - - while True: - try: - line = input("✉>> ").strip() - except EOFError: - print() - break - if not line: - continue - try: - if not line.startswith("@"): - await 
default_handler(context, line) - else: - try: - args = shlex.split(line, comments=True) - except ValueError as e: - print(Fore.RED + f"Error parsing command: {e}" + Fore.RESET) - continue - if len(args) < 1: - continue - cmd = args.pop(0).lower() - if cmd == "@help": - help(cmd_handlers, args) - else: - cmd_handler = cmd_handlers.get(cmd) - if cmd_handler: - await cmd_handler(context, args) - else: - print_commands(cmd_handlers) - except ReallyExit: - # Raised by exit_app() to reall exit the app - sys.exit(0) - except Exception as e: - print() - print(Fore.RED + f"Error\n: {e}" + Fore.RESET) - traceback.print_exc() - except SystemExit as e: - # Command handlers using argparse may see this - if e.code != 0: - print(Fore.RED + f"Error: {e}" + Fore.RESET) - except KeyboardInterrupt: - print() - - print(Fore.RESET) - - -# == -# COMMANDS -# == - - -# Adds messages. Takes a path either to a file or to a directory -def _add_messages_def() -> argparse.ArgumentParser: - cmd = argparse.ArgumentParser( - description="Add messages to index", prog="@add_messages" - ) - cmd.add_argument( - "--path", - default="", - help="Path to an .eml file or to a directory with .eml files", - ) - cmd.add_argument("--ignore_error", type=bool, default=True, help="Ignore errors") - cmd.add_argument( - "--knowledge", type=bool, default=True, help="Automatically extract knowledge" - ) - return cmd - - -@command(_add_messages_def()) -async def add_messages(context: EmailContext, args: list[str]): - named_args = _add_messages_def().parse_args(args) - if named_args.path is None: - print("No path provided") - return - - # Get the path to the email file or directory of emails to ingest - src_path = Path(named_args.path) - emails: Iterable[EmailMessage] - if src_path.is_file(): - emails = [import_email_from_file(str(src_path))] - else: - emails = import_emails_from_dir(str(src_path)) - - print(Fore.CYAN + f"Importing from {src_path}" + Fore.RESET) - - semantic_settings = 
context.conversation.settings.semantic_ref_index_settings - auto_knowledge = semantic_settings.auto_extract_knowledge - print(Fore.CYAN + f"auto_extract_knowledge={auto_knowledge}" + Fore.RESET) - try: - conversation = context.conversation - # Add one at a time for debugging etc. - for i, email in enumerate(emails): - email_id = email.metadata.id - email_src = email.src_url if email.src_url is not None else "" - print_progress(i + 1, None, email.src_url) - print() - if context.is_indexed(email_id): - print(Fore.GREEN + email_src + "[Already indexed]" + Fore.RESET) - continue - - try: - await conversation.add_messages_with_indexing([email]) - context.log_indexed(email_id) - except Exception as e: - if named_args.ignore_error: - print_error(f"{email.src_url}\n{e}") - print( - Fore.GREEN - + f"ignore_error = {named_args.ignore_error}" - + Fore.RESET - ) - else: - raise - finally: - semantic_settings.auto_extract_knowledge = auto_knowledge - - await print_conversation_stats(conversation) - - -async def search_index(context: EmailContext, args: list[str]): - if not args: - return - search_text = args[0].strip() - if not search_text: - print_error("No search text") - return - - print(Fore.CYAN + f"Searching for:\n{search_text} " + Fore.RESET) - - debug_context = searchlang.LanguageSearchDebugContext() - results = await context.conversation.query_debug( - search_text=search_text, - query_translator=context.get_translator(), - debug_context=debug_context, - ) - await print_search_results(context.conversation, debug_context, results) - - -async def generate_answer(context: EmailContext, args: list[str]): - if len(args) == 0: - return - question = args[0].strip() - if len(question) == 0: - print_error("No question") - return - - print(Fore.CYAN + f"Getting answer for:\n{question} " + Fore.RESET) - - answer = await context.conversation.query(question) - color = Fore.RED if answer.startswith("No answer found:") else Fore.GREEN - print(color + answer + Fore.RESET) - - -async 
def reset_index(context: EmailContext, args: list[str]): - print(f"Deleting {context.db_path}") - await context.restart_conversation() - await print_conversation_stats(context.conversation) - - -def _load_index_def() -> argparse.ArgumentParser: - cmdDef = argparse.ArgumentParser( - description="Load index at given db path", prog="@load_index" - ) - cmdDef.add_argument( - "--name", type=str, default="", help="Name of the index to load" - ) - cmdDef.add_argument("--new", type=bool, default=False) - return cmdDef - - -@command(_load_index_def()) -async def load_index(context: EmailContext, args: list[str]): - named_args = _load_index_def().parse_args(args) - - db_name: str = named_args.name - if len(db_name) == 0: - return - - if not db_name.endswith(".db"): - db_name += ".db" - print(db_name) - await context.load_conversation(db_name, named_args.new) - - -def _parse_messages_def() -> argparse.ArgumentParser: - cmdDef = argparse.ArgumentParser(description="Parse messages in the given path") - cmdDef.add_argument("--path", type=str, default="") - cmdDef.add_argument("--verbose", type=bool, default=False) - return cmdDef - - -@command(_parse_messages_def()) -async def parse_messages(context: EmailContext, args: list[str]): - named_args = _parse_messages_def().parse_args(args) - src_path = Path(named_args.path) - file_paths: list[str] - if src_path.is_file(): - file_paths = [str(src_path)] - else: - file_paths = [ - str(file_path) - for file_path in Path(src_path).iterdir() - if file_path.is_file() - ] - - print(f"Parsing {len(file_paths)} messages") - for file_path in file_paths: - try: - msg = import_email_from_file(file_path) - print(file_path) - print("####################") - print_email(msg) - if named_args.verbose: - print_knowledge(msg.get_knowledge()) - print("####################") - - except Exception as e: - print_error(file_path) - print_error(str(e)) - - -async def exit_app(context: EmailContext, args: list[str]): - print("Goodbye") - raise ReallyExit() - - 
-def help(handlers: dict[str, CommandHandler], args: list[str]): - if len(args) > 0: - name = args[0] - if not name.startswith("@"): - name = "@" + name - cmd = handlers.get(name) - if cmd is not None: - print_help(cmd) - return - - print_commands(handlers) - print("@help for details") - - -# -# Utilities -# -async def load_or_create_email_index(db_path: str, create_new: bool) -> EmailMemory: - if create_new: - delete_sqlite_db(db_path) - - settings = ConversationSettings() - settings.storage_provider = await create_storage_provider( - settings.message_text_index_settings, - settings.related_term_index_settings, - db_path, - EmailMessage, - ) - email_memory = await EmailMemory.create(settings) - return email_memory - - -def load_index_log(db_path: str, create_new: bool) -> shelve.Shelf[Any]: - log_path = db_path + ".index_log" - index_log = shelve.open(log_path) - if create_new: - index_log.clear() - return index_log - - -def delete_sqlite_db(db_path: str): - if os.path.exists(db_path): - os.remove(db_path) # Delete existing database for clean test - # Also delete -shm and -wal files if they exist - shm_path = db_path + "-shm" - wal_path = db_path + "-wal" - if os.path.exists(shm_path): - os.remove(shm_path) - if os.path.exists(wal_path): - os.remove(wal_path) - - -# ========================= -# -# Printing -# -# ========================= - - -def print_help(handler: CommandHandler): - if hasattr(handler, "parser"): - parser: argparse.ArgumentParser = handler.parser # type: ignore - print(parser.format_help()) - print() - - -def print_commands(commands: dict[str, CommandHandler]): - names = list(commands.keys()) - names.append("@help") - names.sort() - print_list(Fore.GREEN, names, "COMMANDS", "ul") - - -def print_email(email: EmailMessage): - print("From:", email.metadata.sender) - print("To:", ", ".join(email.metadata.recipients)) - if email.metadata.cc: - print("Cc:", ", ".join(email.metadata.cc)) - if email.metadata.bcc: - print("Bcc:", ", 
".join(email.metadata.bcc)) - if email.metadata.subject: - print("Subject:", email.metadata.subject) - print("Date:", email.timestamp) - - print("Body:") - for chunk in email.text_chunks: - print(Fore.CYAN + chunk + Fore.RESET) - - print(Fore.RESET) - - -def print_knowledge(knowledge: kplib.KnowledgeResponse): - print_list(Fore.GREEN, knowledge.topics, "Topics") - print() - print_list(Fore.GREEN, knowledge.entities, "Entities") - print() - print_list(Fore.GREEN, knowledge.actions, "Actions") - print() - print(Fore.RESET) - - -async def print_conversation_stats(conversation: IConversation): - print(f"Conversation index stats".upper()) - print(f"Message count: {await conversation.messages.size()}") - print(f"Semantic Ref count: {await conversation.semantic_refs.size()}") - - -async def print_search_results( - conversation: IConversation, - debug_context: searchlang.LanguageSearchDebugContext, - results: typechat.Result[list[searchlang.ConversationSearchResult]], -): - print(Fore.CYAN) - utils.pretty_print(debug_context.search_query) - utils.pretty_print(debug_context.search_query_expr) - if isinstance(results, typechat.Failure): - print_error(results.message) - else: - print(Fore.GREEN, "### SEARCH RESULTS") - print() - search_results = results.value - for search_result in search_results: - print(Fore.GREEN, search_result.raw_query_text) - await print_result(search_result, conversation) - print(Fore.RESET) - - -def print_list( - color, list: Iterable[Any], title: str, type: Literal["plain", "ol", "ul"] = "plain" -): - print(color) - if title: - print(f"# {title}\n") - if type == "plain": - for item in list: - print(item) - elif type == "ul": - for item in list: - print(f"- {item}") - elif type == "ol": - for i, item in enumerate(list): - print(f"{i + 1}. 
{item}") - print(Fore.RESET) - - -def print_error(msg: str): - print(Fore.RED + msg + Fore.RESET) - - -def print_progress(cur: int, total: int | None = None, suffix: str | None = "") -> None: - if suffix is None: - suffix = "" - if total is not None: - print(f"[{cur} / {total}] {suffix}\r", end="", flush=True) - else: - print(f"[{cur}] {suffix}\r", end="", flush=True) - - -if __name__ == "__main__": - try: - asyncio.run(main()) - except (KeyboardInterrupt, BrokenPipeError): - print() - sys.exit(1) diff --git a/uv.lock b/uv.lock index 3717ed4c..86f16351 100644 --- a/uv.lock +++ b/uv.lock @@ -6,6 +6,122 @@ resolution-markers = [ "python_full_version < '3.13'", ] +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/9a/152096d4808df8e4268befa55fba462f440f14beab85e8ad9bf990516918/aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1", size = 7858271, upload-time = "2026-03-31T22:01:03.343Z" } +wheels = [ + 
{ url = "https://files.pythonhosted.org/packages/be/6f/353954c29e7dcce7cf00280a02c75f30e133c00793c7a2ed3776d7b2f426/aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9", size = 748876, upload-time = "2026-03-31T21:57:36.319Z" }, + { url = "https://files.pythonhosted.org/packages/f5/1b/428a7c64687b3b2e9cd293186695affc0e1e54a445d0361743b231f11066/aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416", size = 499557, upload-time = "2026-03-31T21:57:38.236Z" }, + { url = "https://files.pythonhosted.org/packages/29/47/7be41556bfbb6917069d6a6634bb7dd5e163ba445b783a90d40f5ac7e3a7/aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2", size = 500258, upload-time = "2026-03-31T21:57:39.923Z" }, + { url = "https://files.pythonhosted.org/packages/67/84/c9ecc5828cb0b3695856c07c0a6817a99d51e2473400f705275a2b3d9239/aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4", size = 1749199, upload-time = "2026-03-31T21:57:41.938Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d3/3c6d610e66b495657622edb6ae7c7fd31b2e9086b4ec50b47897ad6042a9/aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9", size = 1721013, upload-time = "2026-03-31T21:57:43.904Z" }, + { url = "https://files.pythonhosted.org/packages/49/a0/24409c12217456df0bae7babe3b014e460b0b38a8e60753d6cb339f6556d/aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5", size = 1781501, upload-time = 
"2026-03-31T21:57:46.285Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/b65ec649adc5bccc008b0957a9a9c691070aeac4e41cea18559fef49958b/aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e", size = 1878981, upload-time = "2026-03-31T21:57:48.734Z" }, + { url = "https://files.pythonhosted.org/packages/57/d8/8d44036d7eb7b6a8ec4c5494ea0c8c8b94fbc0ed3991c1a7adf230df03bf/aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1", size = 1767934, upload-time = "2026-03-31T21:57:51.171Z" }, + { url = "https://files.pythonhosted.org/packages/31/04/d3f8211f273356f158e3464e9e45484d3fb8c4ce5eb2f6fe9405c3273983/aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286", size = 1566671, upload-time = "2026-03-31T21:57:53.326Z" }, + { url = "https://files.pythonhosted.org/packages/41/db/073e4ebe00b78e2dfcacff734291651729a62953b48933d765dc513bf798/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9", size = 1705219, upload-time = "2026-03-31T21:57:55.385Z" }, + { url = "https://files.pythonhosted.org/packages/48/45/7dfba71a2f9fd97b15c95c06819de7eb38113d2cdb6319669195a7d64270/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88", size = 1743049, upload-time = "2026-03-31T21:57:57.341Z" }, + { url = "https://files.pythonhosted.org/packages/18/71/901db0061e0f717d226386a7f471bb59b19566f2cae5f0d93874b017271f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3", size = 1749557, upload-time = 
"2026-03-31T21:57:59.626Z" }, + { url = "https://files.pythonhosted.org/packages/08/d5/41eebd16066e59cd43728fe74bce953d7402f2b4ddfdfef2c0e9f17ca274/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b", size = 1558931, upload-time = "2026-03-31T21:58:01.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/e6/4a799798bf05740e66c3a1161079bda7a3dd8e22ca392481d7a7f9af82a6/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe", size = 1774125, upload-time = "2026-03-31T21:58:04.007Z" }, + { url = "https://files.pythonhosted.org/packages/84/63/7749337c90f92bc2cb18f9560d67aa6258c7060d1397d21529b8004fcf6f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14", size = 1732427, upload-time = "2026-03-31T21:58:06.337Z" }, + { url = "https://files.pythonhosted.org/packages/98/de/cf2f44ff98d307e72fb97d5f5bbae3bfcb442f0ea9790c0bf5c5c2331404/aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3", size = 433534, upload-time = "2026-03-31T21:58:08.712Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ca/eadf6f9c8fa5e31d40993e3db153fb5ed0b11008ad5d9de98a95045bed84/aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1", size = 460446, upload-time = "2026-03-31T21:58:10.945Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/d76bf503005709e390122d34e15256b88f7008e246c4bdbe915cd4f1adce/aiohttp-3.13.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5029cc80718bbd545123cd8fe5d15025eccaaaace5d0eeec6bd556ad6163d61", size = 742930, upload-time = "2026-03-31T21:58:13.155Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/00/4b7b70223deaebd9bb85984d01a764b0d7bd6526fcdc73cca83bcbe7243e/aiohttp-3.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bb6bf5811620003614076bdc807ef3b5e38244f9d25ca5fe888eaccea2a9832", size = 496927, upload-time = "2026-03-31T21:58:15.073Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/0fb20fb49f8efdcdce6cd8127604ad2c503e754a8f139f5e02b01626523f/aiohttp-3.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a84792f8631bf5a94e52d9cc881c0b824ab42717165a5579c760b830d9392ac9", size = 497141, upload-time = "2026-03-31T21:58:17.009Z" }, + { url = "https://files.pythonhosted.org/packages/3b/86/b7c870053e36a94e8951b803cb5b909bfbc9b90ca941527f5fcafbf6b0fa/aiohttp-3.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57653eac22c6a4c13eb22ecf4d673d64a12f266e72785ab1c8b8e5940d0e8090", size = 1732476, upload-time = "2026-03-31T21:58:18.925Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e5/4e161f84f98d80c03a238671b4136e6530453d65262867d989bbe78244d0/aiohttp-3.13.5-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e5f7debc7a57af53fdf5c5009f9391d9f4c12867049d509bf7bb164a6e295b", size = 1706507, upload-time = "2026-03-31T21:58:21.094Z" }, + { url = "https://files.pythonhosted.org/packages/d4/56/ea11a9f01518bd5a2a2fcee869d248c4b8a0cfa0bb13401574fa31adf4d4/aiohttp-3.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c719f65bebcdf6716f10e9eff80d27567f7892d8988c06de12bbbd39307c6e3a", size = 1773465, upload-time = "2026-03-31T21:58:23.159Z" }, + { url = "https://files.pythonhosted.org/packages/eb/40/333ca27fb74b0383f17c90570c748f7582501507307350a79d9f9f3c6eb1/aiohttp-3.13.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d97f93fdae594d886c5a866636397e2bcab146fd7a132fd6bb9ce182224452f8", size = 
1873523, upload-time = "2026-03-31T21:58:25.59Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d2/e2f77eef1acb7111405433c707dc735e63f67a56e176e72e9e7a2cd3f493/aiohttp-3.13.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3df334e39d4c2f899a914f1dba283c1aadc311790733f705182998c6f7cae665", size = 1754113, upload-time = "2026-03-31T21:58:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/fb/56/3f653d7f53c89669301ec9e42c95233e2a0c0a6dd051269e6e678db4fdb0/aiohttp-3.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe6970addfea9e5e081401bcbadf865d2b6da045472f58af08427e108d618540", size = 1562351, upload-time = "2026-03-31T21:58:29.918Z" }, + { url = "https://files.pythonhosted.org/packages/ec/a6/9b3e91eb8ae791cce4ee736da02211c85c6f835f1bdfac0594a8a3b7018c/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7becdf835feff2f4f335d7477f121af787e3504b48b449ff737afb35869ba7bb", size = 1693205, upload-time = "2026-03-31T21:58:32.214Z" }, + { url = "https://files.pythonhosted.org/packages/98/fc/bfb437a99a2fcebd6b6eaec609571954de2ed424f01c352f4b5504371dd3/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:676e5651705ad5d8a70aeb8eb6936c436d8ebbd56e63436cb7dd9bb36d2a9a46", size = 1730618, upload-time = "2026-03-31T21:58:34.728Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b6/c8534862126191a034f68153194c389addc285a0f1347d85096d349bbc15/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9b16c653d38eb1a611cc898c41e76859ca27f119d25b53c12875fd0474ae31a8", size = 1745185, upload-time = "2026-03-31T21:58:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/0b/93/4ca8ee2ef5236e2707e0fd5fecb10ce214aee1ff4ab307af9c558bda3b37/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:999802d5fa0389f58decd24b537c54aa63c01c3219ce17d1214cbda3c2b22d2d", size = 1557311, upload-time = 
"2026-03-31T21:58:39.38Z" }, + { url = "https://files.pythonhosted.org/packages/57/ae/76177b15f18c5f5d094f19901d284025db28eccc5ae374d1d254181d33f4/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ec707059ee75732b1ba130ed5f9580fe10ff75180c812bc267ded039db5128c6", size = 1773147, upload-time = "2026-03-31T21:58:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/62f05a0a98d88af59d93b7fcac564e5f18f513cb7471696ac286db970d6a/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d6d44a5b48132053c2f6cd5c8cb14bc67e99a63594e336b0f2af81e94d5530c", size = 1730356, upload-time = "2026-03-31T21:58:44.049Z" }, + { url = "https://files.pythonhosted.org/packages/e4/85/fc8601f59dfa8c9523808281f2da571f8b4699685f9809a228adcc90838d/aiohttp-3.13.5-cp313-cp313-win32.whl", hash = "sha256:329f292ed14d38a6c4c435e465f48bebb47479fd676a0411936cc371643225cc", size = 432637, upload-time = "2026-03-31T21:58:46.167Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1b/ac685a8882896acf0f6b31d689e3792199cfe7aba37969fa91da63a7fa27/aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83", size = 458896, upload-time = "2026-03-31T21:58:48.119Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/46572759afc859e867a5bc8ec3487315869013f59281ce61764f76d879de/aiohttp-3.13.5-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:eb4639f32fd4a9904ab8fb45bf3383ba71137f3d9d4ba25b3b3f3109977c5b8c", size = 745721, upload-time = "2026-03-31T21:58:50.229Z" }, + { url = "https://files.pythonhosted.org/packages/13/fe/8a2efd7626dbe6049b2ef8ace18ffda8a4dfcbe1bcff3ac30c0c7575c20b/aiohttp-3.13.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:7e5dc4311bd5ac493886c63cbf76ab579dbe4641268e7c74e48e774c74b6f2be", size = 497663, upload-time = "2026-03-31T21:58:52.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/91/cc8cc78a111826c54743d88651e1687008133c37e5ee615fee9b57990fac/aiohttp-3.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:756c3c304d394977519824449600adaf2be0ccee76d206ee339c5e76b70ded25", size = 499094, upload-time = "2026-03-31T21:58:54.566Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/a8362cb15cf16a3af7e86ed11962d5cd7d59b449202dc576cdc731310bde/aiohttp-3.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecc26751323224cf8186efcf7fbcbc30f4e1d8c7970659daf25ad995e4032a56", size = 1726701, upload-time = "2026-03-31T21:58:56.864Z" }, + { url = "https://files.pythonhosted.org/packages/45/0c/c091ac5c3a17114bd76cbf85d674650969ddf93387876cf67f754204bd77/aiohttp-3.13.5-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10a75acfcf794edf9d8db50e5a7ec5fc818b2a8d3f591ce93bc7b1210df016d2", size = 1683360, upload-time = "2026-03-31T21:58:59.072Z" }, + { url = "https://files.pythonhosted.org/packages/23/73/bcee1c2b79bc275e964d1446c55c54441a461938e70267c86afaae6fba27/aiohttp-3.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f7a18f258d124cd678c5fe072fe4432a4d5232b0657fca7c1847f599233c83a", size = 1773023, upload-time = "2026-03-31T21:59:01.776Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ef/720e639df03004fee2d869f771799d8c23046dec47d5b81e396c7cda583a/aiohttp-3.13.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:df6104c009713d3a89621096f3e3e88cc323fd269dbd7c20afe18535094320be", size = 1853795, upload-time = "2026-03-31T21:59:04.568Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c9/989f4034fb46841208de7aeeac2c6d8300745ab4f28c42f629ba77c2d916/aiohttp-3.13.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:241a94f7de7c0c3b616627aaad530fe2cb620084a8b144d3be7b6ecfe95bae3b", size = 1730405, upload-time = "2026-03-31T21:59:07.221Z" }, + { url = "https://files.pythonhosted.org/packages/ce/75/ee1fd286ca7dc599d824b5651dad7b3be7ff8d9a7e7b3fe9820d9180f7db/aiohttp-3.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c974fb66180e58709b6fc402846f13791240d180b74de81d23913abe48e96d94", size = 1558082, upload-time = "2026-03-31T21:59:09.484Z" }, + { url = "https://files.pythonhosted.org/packages/c3/20/1e9e6650dfc436340116b7aa89ff8cb2bbdf0abc11dfaceaad8f74273a10/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6e27ea05d184afac78aabbac667450c75e54e35f62238d44463131bd3f96753d", size = 1692346, upload-time = "2026-03-31T21:59:12.068Z" }, + { url = "https://files.pythonhosted.org/packages/d8/40/8ebc6658d48ea630ac7903912fe0dd4e262f0e16825aa4c833c56c9f1f56/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a79a6d399cef33a11b6f004c67bb07741d91f2be01b8d712d52c75711b1e07c7", size = 1698891, upload-time = "2026-03-31T21:59:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/d8/78/ea0ae5ec8ba7a5c10bdd6e318f1ba5e76fcde17db8275188772afc7917a4/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c632ce9c0b534fbe25b52c974515ed674937c5b99f549a92127c85f771a78772", size = 1742113, upload-time = "2026-03-31T21:59:17.068Z" }, + { url = "https://files.pythonhosted.org/packages/8a/66/9d308ed71e3f2491be1acb8769d96c6f0c47d92099f3bc9119cada27b357/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fceedde51fbd67ee2bcc8c0b33d0126cc8b51ef3bbde2f86662bd6d5a6f10ec5", size = 1553088, upload-time = "2026-03-31T21:59:19.541Z" }, + { url = "https://files.pythonhosted.org/packages/da/a6/6cc25ed8dfc6e00c90f5c6d126a98e2cf28957ad06fa1036bd34b6f24a2c/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f92995dfec9420bb69ae629abf422e516923ba79ba4403bc750d94fb4a6c68c1", size = 
1757976, upload-time = "2026-03-31T21:59:22.311Z" }, + { url = "https://files.pythonhosted.org/packages/c1/2b/cce5b0ffe0de99c83e5e36d8f828e4161e415660a9f3e58339d07cce3006/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20ae0ff08b1f2c8788d6fb85afcb798654ae6ba0b747575f8562de738078457b", size = 1712444, upload-time = "2026-03-31T21:59:24.635Z" }, + { url = "https://files.pythonhosted.org/packages/6c/cf/9e1795b4160c58d29421eafd1a69c6ce351e2f7c8d3c6b7e4ca44aea1a5b/aiohttp-3.13.5-cp314-cp314-win32.whl", hash = "sha256:b20df693de16f42b2472a9c485e1c948ee55524786a0a34345511afdd22246f3", size = 438128, upload-time = "2026-03-31T21:59:27.291Z" }, + { url = "https://files.pythonhosted.org/packages/22/4d/eaedff67fc805aeba4ba746aec891b4b24cebb1a7d078084b6300f79d063/aiohttp-3.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:f85c6f327bf0b8c29da7d93b1cabb6363fb5e4e160a32fa241ed2dce21b73162", size = 464029, upload-time = "2026-03-31T21:59:29.429Z" }, + { url = "https://files.pythonhosted.org/packages/79/11/c27d9332ee20d68dd164dc12a6ecdef2e2e35ecc97ed6cf0d2442844624b/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1efb06900858bb618ff5cee184ae2de5828896c448403d51fb633f09e109be0a", size = 778758, upload-time = "2026-03-31T21:59:31.547Z" }, + { url = "https://files.pythonhosted.org/packages/04/fb/377aead2e0a3ba5f09b7624f702a964bdf4f08b5b6728a9799830c80041e/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fee86b7c4bd29bdaf0d53d14739b08a106fdda809ca5fe032a15f52fae5fe254", size = 512883, upload-time = "2026-03-31T21:59:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a6/aa109a33671f7a5d3bd78b46da9d852797c5e665bfda7d6b373f56bff2ec/aiohttp-3.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:20058e23909b9e65f9da62b396b77dfa95965cbe840f8def6e572538b1d32e36", size = 516668, upload-time = "2026-03-31T21:59:36.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/b3/ca078f9f2fa9563c36fb8ef89053ea2bb146d6f792c5104574d49d8acb63/aiohttp-3.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cf20a8d6868cb15a73cab329ffc07291ba8c22b1b88176026106ae39aa6df0f", size = 1883461, upload-time = "2026-03-31T21:59:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e3/a7ad633ca1ca497b852233a3cce6906a56c3225fb6d9217b5e5e60b7419d/aiohttp-3.13.5-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:330f5da04c987f1d5bdb8ae189137c77139f36bd1cb23779ca1a354a4b027800", size = 1747661, upload-time = "2026-03-31T21:59:41.187Z" }, + { url = "https://files.pythonhosted.org/packages/33/b9/cd6fe579bed34a906d3d783fe60f2fa297ef55b27bb4538438ee49d4dc41/aiohttp-3.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f1cbf0c7926d315c3c26c2da41fd2b5d2fe01ac0e157b78caefc51a782196cf", size = 1863800, upload-time = "2026-03-31T21:59:43.84Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3f/2c1e2f5144cefa889c8afd5cf431994c32f3b29da9961698ff4e3811b79a/aiohttp-3.13.5-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53fc049ed6390d05423ba33103ded7281fe897cf97878f369a527070bd95795b", size = 1958382, upload-time = "2026-03-31T21:59:46.187Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/f31ec3f1013723b3babe3609e7f119c2c2fb6ef33da90061a705ef3e1bc8/aiohttp-3.13.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:898703aa2667e3c5ca4c54ca36cd73f58b7a38ef87a5606414799ebce4d3fd3a", size = 1803724, upload-time = "2026-03-31T21:59:48.656Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b4/57712dfc6f1542f067daa81eb61da282fab3e6f1966fca25db06c4fc62d5/aiohttp-3.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:0494a01ca9584eea1e5fbd6d748e61ecff218c51b576ee1999c23db7066417d8", size = 1640027, upload-time = "2026-03-31T21:59:51.284Z" }, + { url = "https://files.pythonhosted.org/packages/25/3c/734c878fb43ec083d8e31bf029daae1beafeae582d1b35da234739e82ee7/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6cf81fe010b8c17b09495cbd15c1d35afbc8fb405c0c9cf4738e5ae3af1d65be", size = 1806644, upload-time = "2026-03-31T21:59:53.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/a5/f671e5cbec1c21d044ff3078223f949748f3a7f86b14e34a365d74a5d21f/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:c564dd5f09ddc9d8f2c2d0a301cd30a79a2cc1b46dd1a73bef8f0038863d016b", size = 1791630, upload-time = "2026-03-31T21:59:56.239Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/fb8d0ad63a0b8a99be97deac8c04dacf0785721c158bdf23d679a87aa99e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:2994be9f6e51046c4f864598fd9abeb4fba6e88f0b2152422c9666dcd4aea9c6", size = 1809403, upload-time = "2026-03-31T21:59:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/59/0c/bfed7f30662fcf12206481c2aac57dedee43fe1c49275e85b3a1e1742294/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:157826e2fa245d2ef46c83ea8a5faf77ca19355d278d425c29fda0beb3318037", size = 1634924, upload-time = "2026-03-31T22:00:02.116Z" }, + { url = "https://files.pythonhosted.org/packages/17/d6/fd518d668a09fd5a3319ae5e984d4d80b9a4b3df4e21c52f02251ef5a32e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a8aca50daa9493e9e13c0f566201a9006f080e7c50e5e90d0b06f53146a54500", size = 1836119, upload-time = "2026-03-31T22:00:04.756Z" }, + { url = "https://files.pythonhosted.org/packages/78/b7/15fb7a9d52e112a25b621c67b69c167805cb1f2ab8f1708a5c490d1b52fe/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b13560160d07e047a93f23aaa30718606493036253d5430887514715b67c9d9", size = 1772072, upload-time = 
"2026-03-31T22:00:07.494Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/57ba7f0c4a553fc2bd8b6321df236870ec6fd64a2a473a8a13d4f733214e/aiohttp-3.13.5-cp314-cp314t-win32.whl", hash = "sha256:9a0f4474b6ea6818b41f82172d799e4b3d29e22c2c520ce4357856fced9af2f8", size = 471819, upload-time = "2026-03-31T22:00:10.277Z" }, + { url = "https://files.pythonhosted.org/packages/62/29/2f8418269e46454a26171bfdd6a055d74febf32234e474930f2f60a17145/aiohttp-3.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:18a2f6c1182c51baa1d28d68fea51513cb2a76612f038853c0ad3c145423d3d9", size = 505441, upload-time = "2026-03-31T22:00:12.791Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + [[package]] name = "annotated-types" version = "0.7.0" @@ -17,24 +133,24 @@ wheels = [ [[package]] name = "anyio" -version = "4.12.1" +version = "4.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, ] [[package]] name = "attrs" -version = "25.4.0" +version = "26.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, + { url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" }, ] [[package]] @@ -48,20 +164,20 @@ wheels = [ [[package]] name = "azure-core" -version = "1.38.0" +version = "1.39.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/1b/e503e08e755ea94e7d3419c9242315f888fc664211c90d032e40479022bf/azure_core-1.38.0.tar.gz", hash = "sha256:8194d2682245a3e4e3151a667c686464c3786fed7918b394d035bdcd61bb5993", size = 363033, upload-time = "2026-01-12T17:03:05.535Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/83/bbde3faa84ddcb8eb0eca4b3ffb3221252281db4ce351300fe248c5c70b1/azure_core-1.39.0.tar.gz", hash = "sha256:8a90a562998dd44ce84597590fff6249701b98c0e8797c95fcdd695b54c35d74", size = 367531, upload-time = "2026-03-19T01:31:29.461Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl", hash = "sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335", size = 217825, upload-time = "2026-01-12T17:03:07.291Z" }, + { url = "https://files.pythonhosted.org/packages/7e/d6/8ebcd05b01a580f086ac9a97fb9fac65c09a4b012161cc97c21a336e880b/azure_core-1.39.0-py3-none-any.whl", hash = "sha256:4ac7b70fab5438c3f68770649a78daf97833caa83827f91df9c14e0e0ea7d34f", size = 218318, upload-time = "2026-03-19T01:31:31.25Z" }, ] [[package]] name = "azure-identity" -version = "1.25.1" +version = "1.25.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core" }, @@ -70,9 +186,9 @@ dependencies = [ { name = "msal-extensions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/8d/1a6c41c28a37eab26dc85ab6c86992c700cd3f4a597d9ed174b0e9c69489/azure_identity-1.25.1.tar.gz", hash = "sha256:87ca8328883de6036443e1c37b40e8dc8fb74898240f61071e09d2e369361456", size = 279826, upload-time = "2025-10-06T20:30:02.194Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/0e/3a63efb48aa4a5ae2cfca61ee152fbcb668092134d3eb8bfda472dd5c617/azure_identity-1.25.3.tar.gz", hash = "sha256:ab23c0d63015f50b630ef6c6cf395e7262f439ce06e5d07a64e874c724f8d9e6", size = 286304, upload-time = "2026-03-13T01:12:20.892Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/7b/5652771e24fff12da9dde4c20ecf4682e606b104f26419d139758cc935a6/azure_identity-1.25.1-py3-none-any.whl", hash = "sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651", size = 191317, upload-time = "2025-10-06T20:30:04.251Z" }, + { url = "https://files.pythonhosted.org/packages/49/9a/417b3a533e01953a7c618884df2cb05a71e7b68bdbce4fbdb62349d2a2e8/azure_identity-1.25.3-py3-none-any.whl", hash = "sha256:f4d0b956a8146f30333e071374171f3cfa7bdb8073adb8c3814b65567aa7447c", size 
= 192138, upload-time = "2026-03-13T01:12:22.951Z" }, ] [[package]] @@ -103,21 +219,21 @@ wheels = [ [[package]] name = "azure-mgmt-keyvault" -version = "13.0.0" +version = "14.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-mgmt-core" }, { name = "isodate" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/39/44/d453a7a125cb44f6443808f11c820a4c3f88d0af2c5b8d9adaf490ed064e/azure_mgmt_keyvault-13.0.0.tar.gz", hash = "sha256:56c12904e6d9ac49f886483e50e3f635d8bf43a489eb32fa7b4832f323d396c7", size = 102260, upload-time = "2025-12-11T10:14:41.609Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/dd/e74f82797ee3ff7001832427a218f3079e6174b9fa8b354b84434172d89d/azure_mgmt_keyvault-14.0.1.tar.gz", hash = "sha256:d141a8084ae4c7c5bd1cafeca49a8f3fbebc58dc5bc5290f322ea73d8b307ef7", size = 105496, upload-time = "2026-03-27T08:02:15.282Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/e4/f11ae6caa523834be04a1d52dee11c79f8838e6752f95af3bcfd4f979713/azure_mgmt_keyvault-13.0.0-py3-none-any.whl", hash = "sha256:02c5ca1b428fa7a2c393c6891b7436ad529a7ad22d378eba0b9a26291da67a0d", size = 102054, upload-time = "2025-12-11T10:14:43.064Z" }, + { url = "https://files.pythonhosted.org/packages/9c/c3/29238b1ef3784c31722b900c9c187d7c23dd0d7e724e970f3a7c104ed7aa/azure_mgmt_keyvault-14.0.1-py3-none-any.whl", hash = "sha256:7710873c5b667e19d86109caf2898dddb902e5ed21013e01d7d85ebb496928d7", size = 104380, upload-time = "2026-03-27T08:02:16.726Z" }, ] [[package]] name = "black" -version = "25.12.0" +version = "26.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -127,42 +243,33 @@ dependencies = [ { name = "platformdirs" }, { name = "pytokens" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c4/d9/07b458a3f1c525ac392b5edc6b191ff140b596f9d77092429417a54e249d/black-25.12.0.tar.gz", hash = 
"sha256:8d3dd9cea14bff7ddc0eb243c811cdb1a011ebb4800a5f0335a01a68654796a7", size = 659264, upload-time = "2025-12-08T01:40:52.501Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/bd/26083f805115db17fda9877b3c7321d08c647df39d0df4c4ca8f8450593e/black-25.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31f96b7c98c1ddaeb07dc0f56c652e25bdedaac76d5b68a059d998b57c55594a", size = 1924178, upload-time = "2025-12-08T01:49:51.048Z" }, - { url = "https://files.pythonhosted.org/packages/89/6b/ea00d6651561e2bdd9231c4177f4f2ae19cc13a0b0574f47602a7519b6ca/black-25.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05dd459a19e218078a1f98178c13f861fe6a9a5f88fc969ca4d9b49eb1809783", size = 1742643, upload-time = "2025-12-08T01:49:59.09Z" }, - { url = "https://files.pythonhosted.org/packages/6d/f3/360fa4182e36e9875fabcf3a9717db9d27a8d11870f21cff97725c54f35b/black-25.12.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1f68c5eff61f226934be6b5b80296cf6939e5d2f0c2f7d543ea08b204bfaf59", size = 1800158, upload-time = "2025-12-08T01:44:27.301Z" }, - { url = "https://files.pythonhosted.org/packages/f8/08/2c64830cb6616278067e040acca21d4f79727b23077633953081c9445d61/black-25.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:274f940c147ddab4442d316b27f9e332ca586d39c85ecf59ebdea82cc9ee8892", size = 1426197, upload-time = "2025-12-08T01:45:51.198Z" }, - { url = "https://files.pythonhosted.org/packages/d4/60/a93f55fd9b9816b7432cf6842f0e3000fdd5b7869492a04b9011a133ee37/black-25.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:169506ba91ef21e2e0591563deda7f00030cb466e747c4b09cb0a9dae5db2f43", size = 1237266, upload-time = "2025-12-08T01:45:10.556Z" }, - { url = "https://files.pythonhosted.org/packages/c8/52/c551e36bc95495d2aa1a37d50566267aa47608c81a53f91daa809e03293f/black-25.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a05ddeb656534c3e27a05a29196c962877c83fa5503db89e68857d1161ad08a5", size = 1923809, 
upload-time = "2025-12-08T01:46:55.126Z" }, - { url = "https://files.pythonhosted.org/packages/a0/f7/aac9b014140ee56d247e707af8db0aae2e9efc28d4a8aba92d0abd7ae9d1/black-25.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ec77439ef3e34896995503865a85732c94396edcc739f302c5673a2315e1e7f", size = 1742384, upload-time = "2025-12-08T01:49:37.022Z" }, - { url = "https://files.pythonhosted.org/packages/74/98/38aaa018b2ab06a863974c12b14a6266badc192b20603a81b738c47e902e/black-25.12.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e509c858adf63aa61d908061b52e580c40eae0dfa72415fa47ac01b12e29baf", size = 1798761, upload-time = "2025-12-08T01:46:05.386Z" }, - { url = "https://files.pythonhosted.org/packages/16/3a/a8ac542125f61574a3f015b521ca83b47321ed19bb63fe6d7560f348bfe1/black-25.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:252678f07f5bac4ff0d0e9b261fbb029fa530cfa206d0a636a34ab445ef8ca9d", size = 1429180, upload-time = "2025-12-08T01:45:34.903Z" }, - { url = "https://files.pythonhosted.org/packages/e6/2d/bdc466a3db9145e946762d52cd55b1385509d9f9004fec1c97bdc8debbfb/black-25.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bc5b1c09fe3c931ddd20ee548511c64ebf964ada7e6f0763d443947fd1c603ce", size = 1239350, upload-time = "2025-12-08T01:46:09.458Z" }, - { url = "https://files.pythonhosted.org/packages/35/46/1d8f2542210c502e2ae1060b2e09e47af6a5e5963cb78e22ec1a11170b28/black-25.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0a0953b134f9335c2434864a643c842c44fba562155c738a2a37a4d61f00cad5", size = 1917015, upload-time = "2025-12-08T01:53:27.987Z" }, - { url = "https://files.pythonhosted.org/packages/41/37/68accadf977672beb8e2c64e080f568c74159c1aaa6414b4cd2aef2d7906/black-25.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2355bbb6c3b76062870942d8cc450d4f8ac71f9c93c40122762c8784df49543f", size = 1741830, upload-time = "2025-12-08T01:54:36.861Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/76/03608a9d8f0faad47a3af3a3c8c53af3367f6c0dd2d23a84710456c7ac56/black-25.12.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9678bd991cc793e81d19aeeae57966ee02909877cb65838ccffef24c3ebac08f", size = 1791450, upload-time = "2025-12-08T01:44:52.581Z" }, - { url = "https://files.pythonhosted.org/packages/06/99/b2a4bd7dfaea7964974f947e1c76d6886d65fe5d24f687df2d85406b2609/black-25.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:97596189949a8aad13ad12fcbb4ae89330039b96ad6742e6f6b45e75ad5cfd83", size = 1452042, upload-time = "2025-12-08T01:46:13.188Z" }, - { url = "https://files.pythonhosted.org/packages/b2/7c/d9825de75ae5dd7795d007681b752275ea85a1c5d83269b4b9c754c2aaab/black-25.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:778285d9ea197f34704e3791ea9404cd6d07595745907dd2ce3da7a13627b29b", size = 1267446, upload-time = "2025-12-08T01:46:14.497Z" }, - { url = "https://files.pythonhosted.org/packages/68/11/21331aed19145a952ad28fca2756a1433ee9308079bd03bd898e903a2e53/black-25.12.0-py3-none-any.whl", hash = "sha256:48ceb36c16dbc84062740049eef990bb2ce07598272e673c17d1a7720c71c828", size = 206191, upload-time = "2025-12-08T01:40:50.963Z" }, -] - -[[package]] -name = "cachetools" -version = "6.2.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/c5/61175d618685d42b005847464b8fb4743a67b1b8fdb75e50e5a96c31a27a/black-26.3.1.tar.gz", hash = "sha256:2c50f5063a9641c7eed7795014ba37b0f5fa227f3d408b968936e24bc0566b07", size = 666155, upload-time = "2026-03-12T03:36:03.593Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" }, + { url = "https://files.pythonhosted.org/packages/dc/f8/da5eae4fc75e78e6dceb60624e1b9662ab00d6b452996046dfa9b8a6025b/black-26.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e6f89631eb88a7302d416594a32faeee9fb8fb848290da9d0a5f2903519fc1", size = 1895920, upload-time = "2026-03-12T03:40:13.921Z" }, + { url = "https://files.pythonhosted.org/packages/2c/9f/04e6f26534da2e1629b2b48255c264cabf5eedc5141d04516d9d68a24111/black-26.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41cd2012d35b47d589cb8a16faf8a32ef7a336f56356babd9fcf70939ad1897f", size = 1718499, upload-time = "2026-03-12T03:40:15.239Z" }, + { url = "https://files.pythonhosted.org/packages/04/91/a5935b2a63e31b331060c4a9fdb5a6c725840858c599032a6f3aac94055f/black-26.3.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f76ff19ec5297dd8e66eb64deda23631e642c9393ab592826fd4bdc97a4bce7", size = 1794994, upload-time = "2026-03-12T03:40:17.124Z" }, + { url = "https://files.pythonhosted.org/packages/e7/0a/86e462cdd311a3c2a8ece708d22aba17d0b2a0d5348ca34b40cdcbea512e/black-26.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ddb113db38838eb9f043623ba274cfaf7d51d5b0c22ecb30afe58b1bb8322983", size = 1420867, upload-time = "2026-03-12T03:40:18.83Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e5/22515a19cb7eaee3440325a6b0d95d2c0e88dd180cb011b12ae488e031d1/black-26.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:dfdd51fc3e64ea4f35873d1b3fb25326773d55d2329ff8449139ebaad7357efb", size = 1230124, upload-time = "2026-03-12T03:40:20.425Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/77/5728052a3c0450c53d9bb3945c4c46b91baa62b2cafab6801411b6271e45/black-26.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:855822d90f884905362f602880ed8b5df1b7e3ee7d0db2502d4388a954cc8c54", size = 1895034, upload-time = "2026-03-12T03:40:21.813Z" }, + { url = "https://files.pythonhosted.org/packages/52/73/7cae55fdfdfbe9d19e9a8d25d145018965fe2079fa908101c3733b0c55a0/black-26.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8a33d657f3276328ce00e4d37fe70361e1ec7614da5d7b6e78de5426cb56332f", size = 1718503, upload-time = "2026-03-12T03:40:23.666Z" }, + { url = "https://files.pythonhosted.org/packages/e1/87/af89ad449e8254fdbc74654e6467e3c9381b61472cc532ee350d28cfdafb/black-26.3.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1cd08e99d2f9317292a311dfe578fd2a24b15dbce97792f9c4d752275c1fa56", size = 1793557, upload-time = "2026-03-12T03:40:25.497Z" }, + { url = "https://files.pythonhosted.org/packages/43/10/d6c06a791d8124b843bf325ab4ac7d2f5b98731dff84d6064eafd687ded1/black-26.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:c7e72339f841b5a237ff14f7d3880ddd0fc7f98a1199e8c4327f9a4f478c1839", size = 1422766, upload-time = "2026-03-12T03:40:27.14Z" }, + { url = "https://files.pythonhosted.org/packages/59/4f/40a582c015f2d841ac24fed6390bd68f0fc896069ff3a886317959c9daf8/black-26.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:afc622538b430aa4c8c853f7f63bc582b3b8030fd8c80b70fb5fa5b834e575c2", size = 1232140, upload-time = "2026-03-12T03:40:28.882Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/e36e27c9cebc1311b7579210df6f1c86e50f2d7143ae4fcf8a5017dc8809/black-26.3.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2d6bfaf7fd0993b420bed691f20f9492d53ce9a2bcccea4b797d34e947318a78", size = 1889234, upload-time = "2026-03-12T03:40:30.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/7b/9871acf393f64a5fa33668c19350ca87177b181f44bb3d0c33b2d534f22c/black-26.3.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f89f2ab047c76a9c03f78d0d66ca519e389519902fa27e7a91117ef7611c0568", size = 1720522, upload-time = "2026-03-12T03:40:32.346Z" }, + { url = "https://files.pythonhosted.org/packages/03/87/e766c7f2e90c07fb7586cc787c9ae6462b1eedab390191f2b7fc7f6170a9/black-26.3.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b07fc0dab849d24a80a29cfab8d8a19187d1c4685d8a5e6385a5ce323c1f015f", size = 1787824, upload-time = "2026-03-12T03:40:33.636Z" }, + { url = "https://files.pythonhosted.org/packages/ac/94/2424338fb2d1875e9e83eed4c8e9c67f6905ec25afd826a911aea2b02535/black-26.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:0126ae5b7c09957da2bdbd91a9ba1207453feada9e9fe51992848658c6c8e01c", size = 1445855, upload-time = "2026-03-12T03:40:35.442Z" }, + { url = "https://files.pythonhosted.org/packages/86/43/0c3338bd928afb8ee7471f1a4eec3bdbe2245ccb4a646092a222e8669840/black-26.3.1-cp314-cp314-win_arm64.whl", hash = "sha256:92c0ec1f2cc149551a2b7b47efc32c866406b6891b0ee4625e95967c8f4acfb1", size = 1258109, upload-time = "2026-03-12T03:40:36.832Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0d/52d98722666d6fc6c3dd4c76df339501d6efd40e0ff95e6186a7b7f0befd/black-26.3.1-py3-none-any.whl", hash = "sha256:2bd5aa94fc267d38bb21a70d7410a89f1a1d318841855f698746f8e7f51acd1b", size = 207542, upload-time = "2026-03-12T03:36:01.668Z" }, ] [[package]] name = "certifi" -version = "2026.1.4" +version = "2026.2.25" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] [[package]] @@ -224,71 +331,87 @@ wheels = [ [[package]] name = "charset-normalizer" -version = "3.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, - { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, 
upload-time = "2025-10-14T04:40:54.558Z" }, - { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, - { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, - { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, - { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, - { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, - { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, - { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, - { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, - { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, - { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, - { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, - { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, - { url = 
"https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, - { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, - { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, - { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, - { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, - { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, - { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, 
upload-time = "2025-10-14T04:41:31.188Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, - { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, - { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, - { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, - { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, - { url = 
"https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, - { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, - { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, - { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, - { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, 
upload-time = "2025-10-14T04:41:44.821Z" }, - { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, - { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, - { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, - { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, - { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +version = "3.4.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, upload-time = "2026-04-02T09:26:24.331Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" }, + { url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" }, + { url = "https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" }, + { url = "https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" }, + { url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" }, + { url = "https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" }, + { url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" }, + { url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" }, + { url = "https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" }, + { url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" }, + { url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" }, + { url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" }, + { url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = 
"sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" }, + { url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", size = 148085, upload-time = "2026-04-02T09:27:03.192Z" }, + { url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" }, + { url = "https://files.pythonhosted.org/packages/97/c8/c67cb8c70e19ef1960b97b22ed2a1567711de46c4ddf19799923adc836c2/charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0", size = 309234, 
upload-time = "2026-04-02T09:27:07.194Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/c091fdee33f20de70d6c8b522743b6f831a2f1cd3ff86de4c6a827c48a76/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a", size = 208042, upload-time = "2026-04-02T09:27:08.749Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/ab2ce611b984d2fd5d86a5a8a19c1ae26acac6bad967da4967562c75114d/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b", size = 228706, upload-time = "2026-04-02T09:27:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a8/29/2b1d2cb00bf085f59d29eb773ce58ec2d325430f8c216804a0a5cd83cbca/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41", size = 224727, upload-time = "2026-04-02T09:27:11.175Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/032c2d5a07fe4d4855fea851209cca2b6f03ebeb6d4e3afdb3358386a684/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e", size = 215882, upload-time = "2026-04-02T09:27:12.446Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c2/356065d5a8b78ed04499cae5f339f091946a6a74f91e03476c33f0ab7100/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae", size = 200860, upload-time = "2026-04-02T09:27:13.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/cd/a32a84217ced5039f53b29f460962abb2d4420def55afabe45b1c3c7483d/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18", size = 211564, upload-time = "2026-04-02T09:27:15.272Z" }, + { url = "https://files.pythonhosted.org/packages/44/86/58e6f13ce26cc3b8f4a36b94a0f22ae2f00a72534520f4ae6857c4b81f89/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b", size = 211276, upload-time = "2026-04-02T09:27:16.834Z" }, + { url = "https://files.pythonhosted.org/packages/8f/fe/d17c32dc72e17e155e06883efa84514ca375f8a528ba2546bee73fc4df81/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356", size = 201238, upload-time = "2026-04-02T09:27:18.229Z" }, + { url = "https://files.pythonhosted.org/packages/6a/29/f33daa50b06525a237451cdb6c69da366c381a3dadcd833fa5676bc468b3/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab", size = 230189, upload-time = "2026-04-02T09:27:19.445Z" }, + { url = "https://files.pythonhosted.org/packages/b6/6e/52c84015394a6a0bdcd435210a7e944c5f94ea1055f5cc5d56c5fe368e7b/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46", size = 211352, upload-time = "2026-04-02T09:27:20.79Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d7/4353be581b373033fb9198bf1da3cf8f09c1082561e8e922aa7b39bf9fe8/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44", size = 227024, upload-time = "2026-04-02T09:27:22.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/45/99d18aa925bd1740098ccd3060e238e21115fffbfdcb8f3ece837d0ace6c/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72", size = 217869, upload-time = "2026-04-02T09:27:23.486Z" }, + { url = "https://files.pythonhosted.org/packages/5c/05/5ee478aa53f4bb7996482153d4bfe1b89e0f087f0ab6b294fcf92d595873/charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10", size = 148541, upload-time = "2026-04-02T09:27:25.146Z" }, + { url = "https://files.pythonhosted.org/packages/48/77/72dcb0921b2ce86420b2d79d454c7022bf5be40202a2a07906b9f2a35c97/charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f", size = 159634, upload-time = "2026-04-02T09:27:26.642Z" }, + { url = "https://files.pythonhosted.org/packages/c6/a3/c2369911cd72f02386e4e340770f6e158c7980267da16af8f668217abaa0/charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246", size = 148384, upload-time = "2026-04-02T09:27:28.271Z" }, + { url = "https://files.pythonhosted.org/packages/94/09/7e8a7f73d24dba1f0035fbbf014d2c36828fc1bf9c88f84093e57d315935/charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24", size = 330133, upload-time = "2026-04-02T09:27:29.474Z" }, + { url = "https://files.pythonhosted.org/packages/8d/da/96975ddb11f8e977f706f45cddd8540fd8242f71ecdb5d18a80723dcf62c/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79", size = 216257, upload-time = "2026-04-02T09:27:30.793Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/e8/1d63bf8ef2d388e95c64b2098f45f84758f6d102a087552da1485912637b/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960", size = 234851, upload-time = "2026-04-02T09:27:32.44Z" }, + { url = "https://files.pythonhosted.org/packages/9b/40/e5ff04233e70da2681fa43969ad6f66ca5611d7e669be0246c4c7aaf6dc8/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4", size = 233393, upload-time = "2026-04-02T09:27:34.03Z" }, + { url = "https://files.pythonhosted.org/packages/be/c1/06c6c49d5a5450f76899992f1ee40b41d076aee9279b49cf9974d2f313d5/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e", size = 223251, upload-time = "2026-04-02T09:27:35.369Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/f2ff16fb050946169e3e1f82134d107e5d4ae72647ec8a1b1446c148480f/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1", size = 206609, upload-time = "2026-04-02T09:27:36.661Z" }, + { url = "https://files.pythonhosted.org/packages/69/d5/a527c0cd8d64d2eab7459784fb4169a0ac76e5a6fc5237337982fd61347e/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44", size = 220014, upload-time = "2026-04-02T09:27:38.019Z" }, + { url = "https://files.pythonhosted.org/packages/7e/80/8a7b8104a3e203074dc9aa2c613d4b726c0e136bad1cc734594b02867972/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e", size = 218979, upload-time = "2026-04-02T09:27:39.37Z" }, + { url = "https://files.pythonhosted.org/packages/02/9a/b759b503d507f375b2b5c153e4d2ee0a75aa215b7f2489cf314f4541f2c0/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3", size = 209238, upload-time = "2026-04-02T09:27:40.722Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/0f3f5d47b86bdb79256e7290b26ac847a2832d9a4033f7eb2cd4bcf4bb5b/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0", size = 236110, upload-time = "2026-04-02T09:27:42.33Z" }, + { url = "https://files.pythonhosted.org/packages/96/23/bce28734eb3ed2c91dcf93abeb8a5cf393a7b2749725030bb630e554fdd8/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e", size = 219824, upload-time = "2026-04-02T09:27:43.924Z" }, + { url = "https://files.pythonhosted.org/packages/2c/6f/6e897c6984cc4d41af319b077f2f600fc8214eb2fe2d6bcb79141b882400/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb", size = 233103, upload-time = "2026-04-02T09:27:45.348Z" }, + { url = "https://files.pythonhosted.org/packages/76/22/ef7bd0fe480a0ae9b656189ec00744b60933f68b4f42a7bb06589f6f576a/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe", size = 225194, upload-time = "2026-04-02T09:27:46.706Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/0e0ab3e0b5bc1219bd80a6a0d4d72ca74d9250cb2382b7c699c147e06017/charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = 
"sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0", size = 159827, upload-time = "2026-04-02T09:27:48.053Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1d/29d32e0fb40864b1f878c7f5a0b343ae676c6e2b271a2d55cc3a152391da/charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c", size = 174168, upload-time = "2026-04-02T09:27:49.795Z" }, + { url = "https://files.pythonhosted.org/packages/de/32/d92444ad05c7a6e41fb2036749777c163baf7a0301a040cb672d6b2b1ae9/charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d", size = 153018, upload-time = "2026-04-02T09:27:51.116Z" }, + { url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = "2026-04-02T09:28:37.794Z" }, ] [[package]] name = "click" -version = "8.3.1" +version = "8.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/75/31212c6bf2503fdf920d87fee5d7a86a2e3bcf444984126f13d8e4016804/click-8.3.2.tar.gz", hash = "sha256:14162b8b3b3550a7d479eafa77dfd3c38d9dc8951f6f69c78913a8f9a7540fd5", size = 302856, upload-time = "2026-04-03T19:14:45.118Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = 
"sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/e4/20/71885d8b97d4f3dde17b1fdb92dbd4908b00541c5a3379787137285f602e/click-8.3.2-py3-none-any.whl", hash = "sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d", size = 108379, upload-time = "2026-04-03T19:14:43.505Z" }, ] [[package]] @@ -302,132 +425,139 @@ wheels = [ [[package]] name = "coverage" -version = "7.13.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" }, - { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" }, - { url = "https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" }, - { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" }, - { url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" }, - { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" }, - { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = "2025-12-28T15:41:06.411Z" }, - { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" }, - { url = 
"https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" }, - { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" }, - { url = "https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" }, - { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" }, - { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, - { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, - { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, - { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, - { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = 
"2025-12-28T15:41:30.02Z" }, - { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" }, - { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, - { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, - { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, - { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, - { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, - { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, - { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, - { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, - { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" }, - { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = 
"2025-12-28T15:41:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, - { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, - { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, - { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, - { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, - { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" }, - { url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" }, - { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" }, - { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" }, - { url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = 
"2025-12-28T15:42:14.21Z" }, - { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" }, - { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" }, - { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" }, - { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" }, - { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" }, - { url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" }, - { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" }, - { url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" }, - { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" }, - { url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" }, - { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" }, - { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" }, - { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" }, - { url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" }, - { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" }, +version = "7.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" }, + { url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" }, + { url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" }, + { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" }, + { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" }, + { url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" }, + { url 
= "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" }, + { url = "https://files.pythonhosted.org/packages/74/8c/74fedc9663dcf168b0a059d4ea756ecae4da77a489048f94b5f512a8d0b3/coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1", size = 219576, upload-time = "2026-03-17T10:31:09.045Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c9/44fb661c55062f0818a6ffd2685c67aa30816200d5f2817543717d4b92eb/coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3", size = 219942, upload-time = "2026-03-17T10:31:10.708Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/93419671cee82b780bab7ea96b67c8ef448f5f295f36bf5031154ec9a790/coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26", size = 250935, upload-time = "2026-03-17T10:31:12.392Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/1666e3a4462f8202d836920114fa7a5ee9275d1fa45366d336c551a162dd/coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3", size = 253541, upload-time = "2026-03-17T10:31:14.247Z" }, + { url = "https://files.pythonhosted.org/packages/4e/5e/3ee3b835647be646dcf3c65a7c6c18f87c27326a858f72ab22c12730773d/coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b", size = 254780, upload-time = "2026-03-17T10:31:16.193Z" }, + { url = 
"https://files.pythonhosted.org/packages/44/b3/cb5bd1a04cfcc49ede6cd8409d80bee17661167686741e041abc7ee1b9a9/coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a", size = 256912, upload-time = "2026-03-17T10:31:17.89Z" }, + { url = "https://files.pythonhosted.org/packages/1b/66/c1dceb7b9714473800b075f5c8a84f4588f887a90eb8645282031676e242/coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969", size = 251165, upload-time = "2026-03-17T10:31:19.605Z" }, + { url = "https://files.pythonhosted.org/packages/b7/62/5502b73b97aa2e53ea22a39cf8649ff44827bef76d90bf638777daa27a9d/coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161", size = 252908, upload-time = "2026-03-17T10:31:21.312Z" }, + { url = "https://files.pythonhosted.org/packages/7d/37/7792c2d69854397ca77a55c4646e5897c467928b0e27f2d235d83b5d08c6/coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15", size = 250873, upload-time = "2026-03-17T10:31:23.565Z" }, + { url = "https://files.pythonhosted.org/packages/a3/23/bc866fb6163be52a8a9e5d708ba0d3b1283c12158cefca0a8bbb6e247a43/coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1", size = 255030, upload-time = "2026-03-17T10:31:25.58Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8b/ef67e1c222ef49860701d346b8bbb70881bef283bd5f6cbba68a39a086c7/coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6", size = 250694, upload-time = "2026-03-17T10:31:27.316Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/0d/866d1f74f0acddbb906db212e096dee77a8e2158ca5e6bb44729f9d93298/coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17", size = 252469, upload-time = "2026-03-17T10:31:29.472Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f5/be742fec31118f02ce42b21c6af187ad6a344fed546b56ca60caacc6a9a0/coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85", size = 222112, upload-time = "2026-03-17T10:31:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/66/40/7732d648ab9d069a46e686043241f01206348e2bbf128daea85be4d6414b/coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b", size = 222923, upload-time = "2026-03-17T10:31:33.633Z" }, + { url = "https://files.pythonhosted.org/packages/48/af/fea819c12a095781f6ccd504890aaddaf88b8fab263c4940e82c7b770124/coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664", size = 221540, upload-time = "2026-03-17T10:31:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/23/d2/17879af479df7fbbd44bd528a31692a48f6b25055d16482fdf5cdb633805/coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d", size = 220262, upload-time = "2026-03-17T10:31:37.184Z" }, + { url = "https://files.pythonhosted.org/packages/5b/4c/d20e554f988c8f91d6a02c5118f9abbbf73a8768a3048cb4962230d5743f/coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0", size = 220617, upload-time = "2026-03-17T10:31:39.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/29/9c/f9f5277b95184f764b24e7231e166dfdb5780a46d408a2ac665969416d61/coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806", size = 261912, upload-time = "2026-03-17T10:31:41.324Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f6/7f1ab39393eeb50cfe4747ae8ef0e4fc564b989225aa1152e13a180d74f8/coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3", size = 263987, upload-time = "2026-03-17T10:31:43.724Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/62c084fb489ed9c6fbdf57e006752e7c516ea46fd690e5ed8b8617c7d52e/coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9", size = 266416, upload-time = "2026-03-17T10:31:45.769Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f6/df63d8660e1a0bff6125947afda112a0502736f470d62ca68b288ea762d8/coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd", size = 267558, upload-time = "2026-03-17T10:31:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/5b/02/353ca81d36779bd108f6d384425f7139ac3c58c750dcfaafe5d0bee6436b/coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606", size = 261163, upload-time = "2026-03-17T10:31:50.125Z" }, + { url = "https://files.pythonhosted.org/packages/2c/16/2e79106d5749bcaf3aee6d309123548e3276517cd7851faa8da213bc61bf/coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e", size = 263981, upload-time = "2026-03-17T10:31:51.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/c7/c29e0c59ffa6942030ae6f50b88ae49988e7e8da06de7ecdbf49c6d4feae/coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0", size = 261604, upload-time = "2026-03-17T10:31:53.872Z" }, + { url = "https://files.pythonhosted.org/packages/40/48/097cdc3db342f34006a308ab41c3a7c11c3f0d84750d340f45d88a782e00/coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87", size = 265321, upload-time = "2026-03-17T10:31:55.997Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/4994af354689e14fd03a75f8ec85a9a68d94e0188bbdab3fc1516b55e512/coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479", size = 260502, upload-time = "2026-03-17T10:31:58.308Z" }, + { url = "https://files.pythonhosted.org/packages/22/c6/9bb9ef55903e628033560885f5c31aa227e46878118b63ab15dc7ba87797/coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2", size = 262688, upload-time = "2026-03-17T10:32:00.141Z" }, + { url = "https://files.pythonhosted.org/packages/14/4f/f5df9007e50b15e53e01edea486814783a7f019893733d9e4d6caad75557/coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a", size = 222788, upload-time = "2026-03-17T10:32:02.246Z" }, + { url = "https://files.pythonhosted.org/packages/e1/98/aa7fccaa97d0f3192bec013c4e6fd6d294a6ed44b640e6bb61f479e00ed5/coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819", size = 223851, upload-time = "2026-03-17T10:32:04.416Z" }, + 
{ url = "https://files.pythonhosted.org/packages/3d/8b/e5c469f7352651e5f013198e9e21f97510b23de957dd06a84071683b4b60/coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911", size = 222104, upload-time = "2026-03-17T10:32:06.65Z" }, + { url = "https://files.pythonhosted.org/packages/8e/77/39703f0d1d4b478bfd30191d3c14f53caf596fac00efb3f8f6ee23646439/coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f", size = 219621, upload-time = "2026-03-17T10:32:08.589Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3e/51dff36d99ae14639a133d9b164d63e628532e2974d8b1edb99dd1ebc733/coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e", size = 219953, upload-time = "2026-03-17T10:32:10.507Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6c/1f1917b01eb647c2f2adc9962bd66c79eb978951cab61bdc1acab3290c07/coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a", size = 250992, upload-time = "2026-03-17T10:32:12.41Z" }, + { url = "https://files.pythonhosted.org/packages/22/e5/06b1f88f42a5a99df42ce61208bdec3bddb3d261412874280a19796fc09c/coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510", size = 253503, upload-time = "2026-03-17T10:32:14.449Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/2a148a51e5907e504fa7b85490277734e6771d8844ebcc48764a15e28155/coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247", size = 254852, upload-time = "2026-03-17T10:32:16.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/77/50e8d3d85cc0b7ebe09f30f151d670e302c7ff4a1bf6243f71dd8b0981fa/coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6", size = 257161, upload-time = "2026-03-17T10:32:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c4/b5fd1d4b7bf8d0e75d997afd3925c59ba629fc8616f1b3aae7605132e256/coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0", size = 251021, upload-time = "2026-03-17T10:32:21.344Z" }, + { url = "https://files.pythonhosted.org/packages/f8/66/6ea21f910e92d69ef0b1c3346ea5922a51bad4446c9126db2ae96ee24c4c/coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882", size = 252858, upload-time = "2026-03-17T10:32:23.506Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ea/879c83cb5d61aa2a35fb80e72715e92672daef8191b84911a643f533840c/coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740", size = 250823, upload-time = "2026-03-17T10:32:25.516Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fb/616d95d3adb88b9803b275580bdeee8bd1b69a886d057652521f83d7322f/coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16", size = 255099, upload-time = "2026-03-17T10:32:27.944Z" }, + { url = "https://files.pythonhosted.org/packages/1c/93/25e6917c90ec1c9a56b0b26f6cad6408e5f13bb6b35d484a0d75c9cf000d/coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0", size = 250638, upload-time = "2026-03-17T10:32:29.914Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/7b/dc1776b0464145a929deed214aef9fb1493f159b59ff3c7eeeedf91eddd0/coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0", size = 252295, upload-time = "2026-03-17T10:32:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/ea/fb/99cbbc56a26e07762a2740713f3c8f9f3f3106e3a3dd8cc4474954bccd34/coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc", size = 222360, upload-time = "2026-03-17T10:32:34.233Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b7/4758d4f73fb536347cc5e4ad63662f9d60ba9118cb6785e9616b2ce5d7fa/coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633", size = 223174, upload-time = "2026-03-17T10:32:36.369Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f2/24d84e1dfe70f8ac9fdf30d338239860d0d1d5da0bda528959d0ebc9da28/coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8", size = 221739, upload-time = "2026-03-17T10:32:38.736Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/4a168591057b3668c2428bff25dd3ebc21b629d666d90bcdfa0217940e84/coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b", size = 220351, upload-time = "2026-03-17T10:32:41.196Z" }, + { url = "https://files.pythonhosted.org/packages/f5/21/1fd5c4dbfe4a58b6b99649125635df46decdfd4a784c3cd6d410d303e370/coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c", size = 220612, upload-time = "2026-03-17T10:32:43.204Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/fe/2a924b3055a5e7e4512655a9d4609781b0d62334fa0140c3e742926834e2/coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9", size = 261985, upload-time = "2026-03-17T10:32:45.514Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/c8928f2bd518c45990fe1a2ab8db42e914ef9b726c975facc4282578c3eb/coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29", size = 264107, upload-time = "2026-03-17T10:32:47.971Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ae/4ae35bbd9a0af9d820362751f0766582833c211224b38665c0f8de3d487f/coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607", size = 266513, upload-time = "2026-03-17T10:32:50.1Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/d326174c55af36f74eac6ae781612d9492f060ce8244b570bb9d50d9d609/coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90", size = 267650, upload-time = "2026-03-17T10:32:52.391Z" }, + { url = "https://files.pythonhosted.org/packages/7a/5e/31484d62cbd0eabd3412e30d74386ece4a0837d4f6c3040a653878bfc019/coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3", size = 261089, upload-time = "2026-03-17T10:32:54.544Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d8/49a72d6de146eebb0b7e48cc0f4bc2c0dd858e3d4790ab2b39a2872b62bd/coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab", size = 263982, upload-time = "2026-03-17T10:32:56.803Z" }, + { url = "https://files.pythonhosted.org/packages/06/3b/0351f1bd566e6e4dd39e978efe7958bde1d32f879e85589de147654f57bb/coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562", size = 261579, upload-time = "2026-03-17T10:32:59.466Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/796a2a2f4017f554d7810f5c573449b35b1e46788424a548d4d19201b222/coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2", size = 265316, upload-time = "2026-03-17T10:33:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/3d/16/d5ae91455541d1a78bc90abf495be600588aff8f6db5c8b0dae739fa39c9/coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea", size = 260427, upload-time = "2026-03-17T10:33:03.945Z" }, + { url = "https://files.pythonhosted.org/packages/48/11/07f413dba62db21fb3fad5d0de013a50e073cc4e2dc4306e770360f6dfc8/coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a", size = 262745, upload-time = "2026-03-17T10:33:06.285Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/d792371332eb4663115becf4bad47e047d16234b1aff687b1b18c58d60ae/coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215", size = 223146, upload-time = "2026-03-17T10:33:08.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/51/37221f59a111dca5e85be7dbf09696323b5b9f13ff65e0641d535ed06ea8/coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43", size = 224254, upload-time = "2026-03-17T10:33:11.174Z" }, + 
{ url = "https://files.pythonhosted.org/packages/54/83/6acacc889de8987441aa7d5adfbdbf33d288dad28704a67e574f1df9bcbb/coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45", size = 222276, upload-time = "2026-03-17T10:33:13.466Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" }, ] [[package]] name = "cryptography" -version = "46.0.3" +version = "46.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = 
"sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = 
"2025-10-15T23:17:18.04Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, - { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, - { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, - { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = 
"2025-10-15T23:17:28.06Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, - { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, - { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, - { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, - { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, - { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" }, + { url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" }, + { url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" }, + { url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" }, + { url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" }, + { url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" }, + { url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" }, + { url = "https://files.pythonhosted.org/packages/7b/56/15619b210e689c5403bb0540e4cb7dbf11a6bf42e483b7644e471a2812b3/cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842", size = 7119671, upload-time = "2026-04-08T01:56:44Z" }, + { url = "https://files.pythonhosted.org/packages/74/66/e3ce040721b0b5599e175ba91ab08884c75928fbeb74597dd10ef13505d2/cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c", size = 4268551, upload-time = "2026-04-08T01:56:46.071Z" }, + { url = "https://files.pythonhosted.org/packages/03/11/5e395f961d6868269835dee1bafec6a1ac176505a167f68b7d8818431068/cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902", size = 4408887, upload-time = "2026-04-08T01:56:47.718Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/53/8ed1cf4c3b9c8e611e7122fb56f1c32d09e1fff0f1d77e78d9ff7c82653e/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d", size = 4271354, upload-time = "2026-04-08T01:56:49.312Z" }, + { url = "https://files.pythonhosted.org/packages/50/46/cf71e26025c2e767c5609162c866a78e8a2915bbcfa408b7ca495c6140c4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022", size = 4905845, upload-time = "2026-04-08T01:56:50.916Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ea/01276740375bac6249d0a971ebdf6b4dc9ead0ee0a34ef3b5a88c1a9b0d4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce", size = 4444641, upload-time = "2026-04-08T01:56:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/3d/4c/7d258f169ae71230f25d9f3d06caabcff8c3baf0978e2b7d65e0acac3827/cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f", size = 3967749, upload-time = "2026-04-08T01:56:54.597Z" }, + { url = "https://files.pythonhosted.org/packages/b5/2a/2ea0767cad19e71b3530e4cad9605d0b5e338b6a1e72c37c9c1ceb86c333/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99", size = 4270942, upload-time = "2026-04-08T01:56:56.416Z" }, + { url = "https://files.pythonhosted.org/packages/41/3d/fe14df95a83319af25717677e956567a105bb6ab25641acaa093db79975d/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1", size = 4871079, upload-time = "2026-04-08T01:56:58.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/59/4a479e0f36f8f378d397f4eab4c850b4ffb79a2f0d58704b8fa0703ddc11/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2", size = 4443999, upload-time = "2026-04-08T01:57:00.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/17/b59a741645822ec6d04732b43c5d35e4ef58be7bfa84a81e5ae6f05a1d33/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e", size = 4399191, upload-time = "2026-04-08T01:57:02.654Z" }, + { url = "https://files.pythonhosted.org/packages/59/6a/bb2e166d6d0e0955f1e9ff70f10ec4b2824c9cfcdb4da772c7dd69cc7d80/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee", size = 4655782, upload-time = "2026-04-08T01:57:04.592Z" }, + { url = "https://files.pythonhosted.org/packages/95/b6/3da51d48415bcb63b00dc17c2eff3a651b7c4fed484308d0f19b30e8cb2c/cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298", size = 3002227, upload-time = "2026-04-08T01:57:06.91Z" }, + { url = "https://files.pythonhosted.org/packages/32/a8/9f0e4ed57ec9cebe506e58db11ae472972ecb0c659e4d52bbaee80ca340a/cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb", size = 3475332, upload-time = "2026-04-08T01:57:08.807Z" }, + { url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" }, + { url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" }, + { url = "https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" }, + { url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" }, + { url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" }, + { url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" }, + { url = "https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" }, + { url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" }, + { url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" }, ] [[package]] @@ -448,22 +578,111 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, ] +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { 
url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = 
"2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", 
size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { 
url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = 
"2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + [[package]] name = "genai-prices" -version = "0.0.50" +version = "0.0.56" source = { registry = "https://pypi.org/simple" } 
dependencies = [ { name = "httpx" }, { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8a/69/e93d54058489dc8167ec0e62a48a35f702c45fa3f36210101c6dbfd48a54/genai_prices-0.0.50.tar.gz", hash = "sha256:9ee56fdddaaaff7f66d3939747eb78fc40d57f9e231cf4911938a67d64f30d84", size = 58692, upload-time = "2026-01-06T15:03:16.491Z" } +sdist = { url = "https://files.pythonhosted.org/packages/44/6b/94b3018a672c7775edfb485f0fed8f6068fba75e49b067e8a1ac5eb96764/genai_prices-0.0.56.tar.gz", hash = "sha256:ac24b16a84d0ab97539bfa48dfa4649689de8e3ce71c12ebacef29efb1998045", size = 65872, upload-time = "2026-03-20T20:33:00.732Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/09/f3/5e5756d273c897bb5d0bfb8079bbfeb65fc6beb8bb1facb76dfda01651e9/genai_prices-0.0.50-py3-none-any.whl", hash = "sha256:ac70a5a0a532cb19591f8a465b24799d887b0241777f612ddac1d7604befa4d0", size = 61331, upload-time = "2026-01-06T15:03:15.486Z" }, + { url = "https://files.pythonhosted.org/packages/a3/f6/8ef7e4c286deb2709d11ca96a5237caae3ef4876ab3c48095856cfd2df30/genai_prices-0.0.56-py3-none-any.whl", hash = "sha256:dbe86be8f3f556bed1b72209ed36851fec8b01793b3b220f42921a4e7da945f6", size = 68966, upload-time = "2026-03-20T20:33:02.555Z" }, ] [[package]] name = "google-api-core" -version = "2.28.1" +version = "2.30.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth" }, @@ -472,14 +691,14 @@ dependencies = [ { name = "protobuf" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/da/83d7043169ac2c8c7469f0e375610d78ae2160134bf1b80634c482fa079c/google_api_core-2.28.1.tar.gz", hash = "sha256:2b405df02d68e68ce0fbc138559e6036559e685159d148ae5861013dc201baf8", size = 176759, upload-time = "2025-10-28T21:34:51.529Z" } +sdist = { url = "https://files.pythonhosted.org/packages/16/ce/502a57fb0ec752026d24df1280b162294b22a0afb98a326084f9a979138b/google_api_core-2.30.3.tar.gz", hash = 
"sha256:e601a37f148585319b26db36e219df68c5d07b6382cff2d580e83404e44d641b", size = 177001, upload-time = "2026-04-10T00:41:28.035Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/d4/90197b416cb61cefd316964fd9e7bd8324bcbafabf40eef14a9f20b81974/google_api_core-2.28.1-py3-none-any.whl", hash = "sha256:4021b0f8ceb77a6fb4de6fde4502cecab45062e66ff4f2895169e0b35bc9466c", size = 173706, upload-time = "2025-10-28T21:34:50.151Z" }, + { url = "https://files.pythonhosted.org/packages/03/15/e56f351cf6ef1cfea58e6ac226a7318ed1deb2218c4b3cc9bd9e4b786c5a/google_api_core-2.30.3-py3-none-any.whl", hash = "sha256:a85761ba72c444dad5d611c2220633480b2b6be2521eca69cca2dbb3ffd6bfe8", size = 173274, upload-time = "2026-04-09T22:57:16.198Z" }, ] [[package]] name = "google-api-python-client" -version = "2.187.0" +version = "2.194.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core" }, @@ -488,73 +707,69 @@ dependencies = [ { name = "httplib2" }, { name = "uritemplate" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/75/83/60cdacf139d768dd7f0fcbe8d95b418299810068093fdf8228c6af89bb70/google_api_python_client-2.187.0.tar.gz", hash = "sha256:e98e8e8f49e1b5048c2f8276473d6485febc76c9c47892a8b4d1afa2c9ec8278", size = 14068154, upload-time = "2025-11-06T01:48:53.274Z" } +sdist = { url = "https://files.pythonhosted.org/packages/60/ab/e83af0eb043e4ccc49571ca7a6a49984e9d00f4e9e6e6f1238d60bc84dce/google_api_python_client-2.194.0.tar.gz", hash = "sha256:db92647bd1a90f40b79c9618461553c2b20b6a43ce7395fa6de07132dc14f023", size = 14443469, upload-time = "2026-04-08T23:07:35.757Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/58/c1e716be1b055b504d80db2c8413f6c6a890a6ae218a65f178b63bc30356/google_api_python_client-2.187.0-py3-none-any.whl", hash = "sha256:d8d0f6d85d7d1d10bdab32e642312ed572bdc98919f72f831b44b9a9cebba32f", size = 14641434, upload-time = "2025-11-06T01:48:50.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/34/5a624e49f179aa5b0cb87b2ce8093960299030ff40423bfbde09360eb908/google_api_python_client-2.194.0-py3-none-any.whl", hash = "sha256:61eaaac3b8fc8fdf11c08af87abc3d1342d1b37319cc1b57405f86ef7697e717", size = 15016514, upload-time = "2026-04-08T23:07:33.093Z" }, ] [[package]] name = "google-auth" -version = "2.41.1" +version = "2.49.2" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "cachetools" }, + { name = "cryptography" }, { name = "pyasn1-modules" }, - { name = "rsa" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/af/5129ce5b2f9688d2fa49b463e544972a7c82b0fdb50980dafee92e121d9f/google_auth-2.41.1.tar.gz", hash = "sha256:b76b7b1f9e61f0cb7e88870d14f6a94aeef248959ef6992670efee37709cbfd2", size = 292284, upload-time = "2025-09-30T22:51:26.363Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/fc/e925290a1ad95c975c459e2df070fac2b90954e13a0370ac505dff78cb99/google_auth-2.49.2.tar.gz", hash = "sha256:c1ae38500e73065dcae57355adb6278cf8b5c8e391994ae9cbadbcb9631ab409", size = 333958, upload-time = "2026-04-10T00:41:21.888Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/a4/7319a2a8add4cc352be9e3efeff5e2aacee917c85ca2fa1647e29089983c/google_auth-2.41.1-py2.py3-none-any.whl", hash = "sha256:754843be95575b9a19c604a848a41be03f7f2afd8c019f716dc1f51ee41c639d", size = 221302, upload-time = "2025-09-30T22:51:24.212Z" }, + { url = "https://files.pythonhosted.org/packages/73/76/d241a5c927433420507215df6cac1b1fa4ac0ba7a794df42a84326c68da8/google_auth-2.49.2-py3-none-any.whl", hash = "sha256:c2720924dfc82dedb962c9f52cabb2ab16714fd0a6a707e40561d217574ed6d5", size = 240638, upload-time = "2026-04-10T00:41:14.501Z" }, ] [[package]] name = "google-auth-httplib2" -version = "0.3.0" +version = "0.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth" }, { name = "httplib2" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/d5/ad/c1f2b1175096a8d04cf202ad5ea6065f108d26be6fc7215876bde4a7981d/google_auth_httplib2-0.3.0.tar.gz", hash = "sha256:177898a0175252480d5ed916aeea183c2df87c1f9c26705d74ae6b951c268b0b", size = 11134, upload-time = "2025-12-15T22:13:51.825Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/99/107612bef8d24b298bb5a7c8466f908ecda791d43f9466f5c3978f5b24c1/google_auth_httplib2-0.3.1.tar.gz", hash = "sha256:0af542e815784cb64159b4469aa5d71dd41069ba93effa006e1916b1dcd88e55", size = 11152, upload-time = "2026-03-30T22:50:26.766Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/99/d5/3c97526c8796d3caf5f4b3bed2b05e8a7102326f00a334e7a438237f3b22/google_auth_httplib2-0.3.0-py3-none-any.whl", hash = "sha256:426167e5df066e3f5a0fc7ea18768c08e7296046594ce4c8c409c2457dd1f776", size = 9529, upload-time = "2025-12-15T22:13:51.048Z" }, + { url = "https://files.pythonhosted.org/packages/97/e9/93afb14d23a949acaa3f4e7cc51a0024671174e116e35f42850764b99634/google_auth_httplib2-0.3.1-py3-none-any.whl", hash = "sha256:682356a90ef4ba3d06548c37e9112eea6fc00395a11b0303a644c1a86abc275c", size = 9534, upload-time = "2026-03-30T22:49:03.384Z" }, ] [[package]] name = "google-auth-oauthlib" -version = "1.2.3" +version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth" }, { name = "requests-oauthlib" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/86/a6/c6336a6ceb682709a4aa39e2e6b5754a458075ca92359512b6cbfcb25ae3/google_auth_oauthlib-1.2.3.tar.gz", hash = "sha256:eb09e450d3cc789ecbc2b3529cb94a713673fd5f7a22c718ad91cf75aedc2ea4", size = 21265, upload-time = "2025-10-30T21:28:19.105Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/82/62482931dcbe5266a2680d0da17096f2aab983ecb320277d9556700ce00e/google_auth_oauthlib-1.3.1.tar.gz", hash = "sha256:14c22c7b3dd3d06dbe44264144409039465effdd1eef94f7ce3710e486cc4bfa", size = 21663, upload-time = 
"2026-03-30T22:49:56.408Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/07/a54c100da461ffc5968457823fcc665a48fb4b875c68bcfecbfe24a10dbe/google_auth_oauthlib-1.2.3-py3-none-any.whl", hash = "sha256:7c0940e037677f25e71999607493640d071212e7f3c15aa0febea4c47a5a0680", size = 19184, upload-time = "2025-10-30T21:28:17.88Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e0/cb454a95f460903e39f101e950038ec24a072ca69d0a294a6df625cc1627/google_auth_oauthlib-1.3.1-py3-none-any.whl", hash = "sha256:1a139ef23f1318756805b0e95f655c238bffd29655329a2978218248da4ee7f8", size = 19247, upload-time = "2026-03-30T20:02:23.894Z" }, ] [[package]] name = "googleapis-common-protos" -version = "1.72.0" +version = "1.74.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/18/a746c8344152d368a5aac738d4c857012f2c5d1fd2eac7e17b647a7861bd/googleapis_common_protos-1.74.0.tar.gz", hash = "sha256:57971e4eeeba6aad1163c1f0fc88543f965bb49129b8bb55b2b7b26ecab084f1", size = 151254, upload-time = "2026-04-02T21:23:26.679Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b0/be5d3329badb9230b765de6eea66b73abd5944bdeb5afb3562ddcd80ae84/googleapis_common_protos-1.74.0-py3-none-any.whl", hash = 
"sha256:702216f78610bb510e3f12ac3cafd281b7ac45cc5d86e90ad87e4d301a3426b5", size = 300743, upload-time = "2026-04-02T21:22:49.108Z" }, ] [[package]] -name = "griffe" -version = "1.15.0" +name = "griffelib" +version = "2.0.2" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0d/0c/3a471b6e31951dce2360477420d0a8d1e00dea6cf33b70f3e8c3ab6e28e1/griffe-1.15.0.tar.gz", hash = "sha256:7726e3afd6f298fbc3696e67958803e7ac843c1cfe59734b6251a40cdbfb5eea", size = 424112, upload-time = "2025-11-10T15:03:15.52Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/82/74f4a3310cdabfbb10da554c3a672847f1ed33c6f61dd472681ce7f1fe67/griffelib-2.0.2.tar.gz", hash = "sha256:3cf20b3bc470e83763ffbf236e0076b1211bac1bc67de13daf494640f2de707e", size = 166461, upload-time = "2026-03-27T11:34:51.091Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" }, + { url = "https://files.pythonhosted.org/packages/11/8c/c9138d881c79aa0ea9ed83cbd58d5ca75624378b38cee225dcf5c42cc91f/griffelib-2.0.2-py3-none-any.whl", hash = "sha256:925c857658fb1ba40c0772c37acbc2ab650bd794d9c1b9726922e36ea4117ea1", size = 142357, upload-time = "2026-03-27T11:34:46.275Z" }, ] [[package]] @@ -566,6 +781,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] +[[package]] +name = "h2" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, +] + +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, +] + [[package]] name = "httpcore" version = "1.0.9" @@ -581,14 +818,14 @@ wheels = [ [[package]] name = "httplib2" -version = "0.31.0" +version = "0.31.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyparsing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/77/6653db69c1f7ecfe5e3f9726fdadc981794656fcd7d98c4209fecfea9993/httplib2-0.31.0.tar.gz", hash = "sha256:ac7ab497c50975147d4f7b1ade44becc7df2f8954d42b38b3d69c515f531135c", size = 250759, upload-time = "2025-09-11T12:16:03.403Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/1f/e86365613582c027dda5ddb64e1010e57a3d53e99ab8a72093fa13d565ec/httplib2-0.31.2.tar.gz", hash = 
"sha256:385e0869d7397484f4eab426197a4c020b606edd43372492337c0b4010ae5d24", size = 250800, upload-time = "2026-01-23T11:04:44.165Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/a2/0d269db0f6163be503775dc8b6a6fa15820cc9fdc866f6ba608d86b721f2/httplib2-0.31.0-py3-none-any.whl", hash = "sha256:b9cd78abea9b4e43a7714c6e0f8b6b8561a6fc1e95d5dbd367f5bf0ef35f5d24", size = 91148, upload-time = "2025-09-11T12:16:01.803Z" }, + { url = "https://files.pythonhosted.org/packages/2f/90/fd509079dfcab01102c0fdd87f3a9506894bc70afcf9e9785ef6b2b3aff6/httplib2-0.31.2-py3-none-any.whl", hash = "sha256:dbf0c2fa3862acf3c55c078ea9c0bc4481d7dc5117cae71be9514912cf9f8349", size = 91099, upload-time = "2026-01-23T11:04:42.78Z" }, ] [[package]] @@ -606,6 +843,11 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + [[package]] name = "httpx-sse" version = "0.4.3" @@ -615,6 +857,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, ] +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, +] + [[package]] name = "idna" version = "3.11" @@ -656,79 +907,83 @@ wheels = [ [[package]] name = "isort" -version = "7.0.0" +version = "8.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049, upload-time = "2025-10-11T13:30:59.107Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/7c/ec4ab396d31b3b395e2e999c8f46dec78c5e29209fac49d1f4dace04041d/isort-8.0.1.tar.gz", hash = "sha256:171ac4ff559cdc060bcfff550bc8404a486fee0caab245679c2abe7cb253c78d", size = 769592, upload-time = "2026-02-28T10:08:20.685Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672, upload-time = "2025-10-11T13:30:57.665Z" }, + { url = "https://files.pythonhosted.org/packages/3e/95/c7c34aa53c16353c56d0b802fba48d5f5caa2cdee7958acbcb795c830416/isort-8.0.1-py3-none-any.whl", hash = "sha256:28b89bc70f751b559aeca209e6120393d43fbe2490de0559662be7a9787e3d75", size = 89733, upload-time = "2026-02-28T10:08:19.466Z" }, ] [[package]] name = "jiter" -version = "0.12.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = 
"2025-11-09T20:49:23.302Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, - { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, - { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, - { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, - { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, - { 
url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, - { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, - { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, - { url = 
"https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, - { url = "https://files.pythonhosted.org/packages/3d/a6/97209693b177716e22576ee1161674d1d58029eb178e01866a0422b69224/jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e", size = 313658, upload-time = "2025-11-09T20:47:44.424Z" }, - { url = "https://files.pythonhosted.org/packages/06/4d/125c5c1537c7d8ee73ad3d530a442d6c619714b95027143f1b61c0b4dfe0/jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1", size = 318605, upload-time = "2025-11-09T20:47:45.973Z" }, - { url = "https://files.pythonhosted.org/packages/99/bf/a840b89847885064c41a5f52de6e312e91fa84a520848ee56c97e4fa0205/jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf", size = 349803, upload-time = "2025-11-09T20:47:47.535Z" }, - { url = "https://files.pythonhosted.org/packages/8a/88/e63441c28e0db50e305ae23e19c1d8fae012d78ed55365da392c1f34b09c/jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44", size = 365120, upload-time = "2025-11-09T20:47:49.284Z" }, - { url = "https://files.pythonhosted.org/packages/0a/7c/49b02714af4343970eb8aca63396bc1c82fa01197dbb1e9b0d274b550d4e/jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45", size = 479918, upload-time = "2025-11-09T20:47:50.807Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/ba/0a809817fdd5a1db80490b9150645f3aae16afad166960bcd562be194f3b/jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87", size = 379008, upload-time = "2025-11-09T20:47:52.211Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c3/c9fc0232e736c8877d9e6d83d6eeb0ba4e90c6c073835cc2e8f73fdeef51/jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed", size = 361785, upload-time = "2025-11-09T20:47:53.512Z" }, - { url = "https://files.pythonhosted.org/packages/96/61/61f69b7e442e97ca6cd53086ddc1cf59fb830549bc72c0a293713a60c525/jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9", size = 386108, upload-time = "2025-11-09T20:47:54.893Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2e/76bb3332f28550c8f1eba3bf6e5efe211efda0ddbbaf24976bc7078d42a5/jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626", size = 519937, upload-time = "2025-11-09T20:47:56.253Z" }, - { url = "https://files.pythonhosted.org/packages/84/d6/fa96efa87dc8bff2094fb947f51f66368fa56d8d4fc9e77b25d7fbb23375/jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c", size = 510853, upload-time = "2025-11-09T20:47:58.32Z" }, - { url = "https://files.pythonhosted.org/packages/8a/28/93f67fdb4d5904a708119a6ab58a8f1ec226ff10a94a282e0215402a8462/jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de", size = 204699, upload-time = "2025-11-09T20:47:59.686Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/1f/30b0eb087045a0abe2a5c9c0c0c8da110875a1d3be83afd4a9a4e548be3c/jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a", size = 204258, upload-time = "2025-11-09T20:48:01.01Z" }, - { url = "https://files.pythonhosted.org/packages/2c/f4/2b4daf99b96bce6fc47971890b14b2a36aef88d7beb9f057fafa032c6141/jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60", size = 185503, upload-time = "2025-11-09T20:48:02.35Z" }, - { url = "https://files.pythonhosted.org/packages/39/ca/67bb15a7061d6fe20b9b2a2fd783e296a1e0f93468252c093481a2f00efa/jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6", size = 317965, upload-time = "2025-11-09T20:48:03.783Z" }, - { url = "https://files.pythonhosted.org/packages/18/af/1788031cd22e29c3b14bc6ca80b16a39a0b10e611367ffd480c06a259831/jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4", size = 345831, upload-time = "2025-11-09T20:48:05.55Z" }, - { url = "https://files.pythonhosted.org/packages/05/17/710bf8472d1dff0d3caf4ced6031060091c1320f84ee7d5dcbed1f352417/jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb", size = 361272, upload-time = "2025-11-09T20:48:06.951Z" }, - { url = "https://files.pythonhosted.org/packages/fb/f1/1dcc4618b59761fef92d10bcbb0b038b5160be653b003651566a185f1a5c/jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7", size = 204604, upload-time = "2025-11-09T20:48:08.328Z" }, - { url = 
"https://files.pythonhosted.org/packages/d9/32/63cb1d9f1c5c6632a783c0052cde9ef7ba82688f7065e2f0d5f10a7e3edb/jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3", size = 185628, upload-time = "2025-11-09T20:48:09.572Z" }, - { url = "https://files.pythonhosted.org/packages/a8/99/45c9f0dbe4a1416b2b9a8a6d1236459540f43d7fb8883cff769a8db0612d/jiter-0.12.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:c46d927acd09c67a9fb1416df45c5a04c27e83aae969267e98fba35b74e99525", size = 312478, upload-time = "2025-11-09T20:48:10.898Z" }, - { url = "https://files.pythonhosted.org/packages/4c/a7/54ae75613ba9e0f55fcb0bc5d1f807823b5167cc944e9333ff322e9f07dd/jiter-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:774ff60b27a84a85b27b88cd5583899c59940bcc126caca97eb2a9df6aa00c49", size = 318706, upload-time = "2025-11-09T20:48:12.266Z" }, - { url = "https://files.pythonhosted.org/packages/59/31/2aa241ad2c10774baf6c37f8b8e1f39c07db358f1329f4eb40eba179c2a2/jiter-0.12.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5433fab222fb072237df3f637d01b81f040a07dcac1cb4a5c75c7aa9ed0bef1", size = 351894, upload-time = "2025-11-09T20:48:13.673Z" }, - { url = "https://files.pythonhosted.org/packages/54/4f/0f2759522719133a9042781b18cc94e335b6d290f5e2d3e6899d6af933e3/jiter-0.12.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f8c593c6e71c07866ec6bfb790e202a833eeec885022296aff6b9e0b92d6a70e", size = 365714, upload-time = "2025-11-09T20:48:15.083Z" }, - { url = "https://files.pythonhosted.org/packages/dc/6f/806b895f476582c62a2f52c453151edd8a0fde5411b0497baaa41018e878/jiter-0.12.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90d32894d4c6877a87ae00c6b915b609406819dce8bc0d4e962e4de2784e567e", size = 478989, upload-time = "2025-11-09T20:48:16.706Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/6c/012d894dc6e1033acd8db2b8346add33e413ec1c7c002598915278a37f79/jiter-0.12.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:798e46eed9eb10c3adbbacbd3bdb5ecd4cf7064e453d00dbef08802dae6937ff", size = 378615, upload-time = "2025-11-09T20:48:18.614Z" }, - { url = "https://files.pythonhosted.org/packages/87/30/d718d599f6700163e28e2c71c0bbaf6dace692e7df2592fd793ac9276717/jiter-0.12.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3f1368f0a6719ea80013a4eb90ba72e75d7ea67cfc7846db2ca504f3df0169a", size = 364745, upload-time = "2025-11-09T20:48:20.117Z" }, - { url = "https://files.pythonhosted.org/packages/8f/85/315b45ce4b6ddc7d7fceca24068543b02bdc8782942f4ee49d652e2cc89f/jiter-0.12.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65f04a9d0b4406f7e51279710b27484af411896246200e461d80d3ba0caa901a", size = 386502, upload-time = "2025-11-09T20:48:21.543Z" }, - { url = "https://files.pythonhosted.org/packages/74/0b/ce0434fb40c5b24b368fe81b17074d2840748b4952256bab451b72290a49/jiter-0.12.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:fd990541982a24281d12b67a335e44f117e4c6cbad3c3b75c7dea68bf4ce3a67", size = 519845, upload-time = "2025-11-09T20:48:22.964Z" }, - { url = "https://files.pythonhosted.org/packages/e8/a3/7a7a4488ba052767846b9c916d208b3ed114e3eb670ee984e4c565b9cf0d/jiter-0.12.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:b111b0e9152fa7df870ecaebb0bd30240d9f7fff1f2003bcb4ed0f519941820b", size = 510701, upload-time = "2025-11-09T20:48:24.483Z" }, - { url = "https://files.pythonhosted.org/packages/c3/16/052ffbf9d0467b70af24e30f91e0579e13ded0c17bb4a8eb2aed3cb60131/jiter-0.12.0-cp314-cp314-win32.whl", hash = "sha256:a78befb9cc0a45b5a5a0d537b06f8544c2ebb60d19d02c41ff15da28a9e22d42", size = 205029, upload-time = "2025-11-09T20:48:25.749Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/18/3cf1f3f0ccc789f76b9a754bdb7a6977e5d1d671ee97a9e14f7eb728d80e/jiter-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:e1fe01c082f6aafbe5c8faf0ff074f38dfb911d53f07ec333ca03f8f6226debf", size = 204960, upload-time = "2025-11-09T20:48:27.415Z" }, - { url = "https://files.pythonhosted.org/packages/02/68/736821e52ecfdeeb0f024b8ab01b5a229f6b9293bbdb444c27efade50b0f/jiter-0.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:d72f3b5a432a4c546ea4bedc84cce0c3404874f1d1676260b9c7f048a9855451", size = 185529, upload-time = "2025-11-09T20:48:29.125Z" }, - { url = "https://files.pythonhosted.org/packages/30/61/12ed8ee7a643cce29ac97c2281f9ce3956eb76b037e88d290f4ed0d41480/jiter-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e6ded41aeba3603f9728ed2b6196e4df875348ab97b28fc8afff115ed42ba7a7", size = 318974, upload-time = "2025-11-09T20:48:30.87Z" }, - { url = "https://files.pythonhosted.org/packages/2d/c6/f3041ede6d0ed5e0e79ff0de4c8f14f401bbf196f2ef3971cdbe5fd08d1d/jiter-0.12.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a947920902420a6ada6ad51892082521978e9dd44a802663b001436e4b771684", size = 345932, upload-time = "2025-11-09T20:48:32.658Z" }, - { url = "https://files.pythonhosted.org/packages/d5/5d/4d94835889edd01ad0e2dbfc05f7bdfaed46292e7b504a6ac7839aa00edb/jiter-0.12.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:add5e227e0554d3a52cf390a7635edaffdf4f8fce4fdbcef3cc2055bb396a30c", size = 367243, upload-time = "2025-11-09T20:48:34.093Z" }, - { url = "https://files.pythonhosted.org/packages/fd/76/0051b0ac2816253a99d27baf3dda198663aff882fa6ea7deeb94046da24e/jiter-0.12.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9b1cda8fcb736250d7e8711d4580ebf004a46771432be0ae4796944b5dfa5d", size = 479315, upload-time = "2025-11-09T20:48:35.507Z" }, - { url = 
"https://files.pythonhosted.org/packages/70/ae/83f793acd68e5cb24e483f44f482a1a15601848b9b6f199dacb970098f77/jiter-0.12.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deeb12a2223fe0135c7ff1356a143d57f95bbf1f4a66584f1fc74df21d86b993", size = 380714, upload-time = "2025-11-09T20:48:40.014Z" }, - { url = "https://files.pythonhosted.org/packages/b1/5e/4808a88338ad2c228b1126b93fcd8ba145e919e886fe910d578230dabe3b/jiter-0.12.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c596cc0f4cb574877550ce4ecd51f8037469146addd676d7c1a30ebe6391923f", size = 365168, upload-time = "2025-11-09T20:48:41.462Z" }, - { url = "https://files.pythonhosted.org/packages/0c/d4/04619a9e8095b42aef436b5aeb4c0282b4ff1b27d1db1508df9f5dc82750/jiter-0.12.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ab4c823b216a4aeab3fdbf579c5843165756bd9ad87cc6b1c65919c4715f783", size = 387893, upload-time = "2025-11-09T20:48:42.921Z" }, - { url = "https://files.pythonhosted.org/packages/17/ea/d3c7e62e4546fdc39197fa4a4315a563a89b95b6d54c0d25373842a59cbe/jiter-0.12.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:e427eee51149edf962203ff8db75a7514ab89be5cb623fb9cea1f20b54f1107b", size = 520828, upload-time = "2025-11-09T20:48:44.278Z" }, - { url = "https://files.pythonhosted.org/packages/cc/0b/c6d3562a03fd767e31cb119d9041ea7958c3c80cb3d753eafb19b3b18349/jiter-0.12.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:edb868841f84c111255ba5e80339d386d937ec1fdce419518ce1bd9370fac5b6", size = 511009, upload-time = "2025-11-09T20:48:45.726Z" }, - { url = "https://files.pythonhosted.org/packages/aa/51/2cb4468b3448a8385ebcd15059d325c9ce67df4e2758d133ab9442b19834/jiter-0.12.0-cp314-cp314t-win32.whl", hash = "sha256:8bbcfe2791dfdb7c5e48baf646d37a6a3dcb5a97a032017741dea9f817dca183", size = 205110, upload-time = "2025-11-09T20:48:47.033Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/c5/ae5ec83dec9c2d1af805fd5fe8f74ebded9c8670c5210ec7820ce0dbeb1e/jiter-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2fa940963bf02e1d8226027ef461e36af472dea85d36054ff835aeed944dd873", size = 205223, upload-time = "2025-11-09T20:48:49.076Z" }, - { url = "https://files.pythonhosted.org/packages/97/9a/3c5391907277f0e55195550cf3fa8e293ae9ee0c00fb402fec1e38c0c82f/jiter-0.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:506c9708dd29b27288f9f8f1140c3cb0e3d8ddb045956d7757b1fa0e0f39a473", size = 185564, upload-time = "2025-11-09T20:48:50.376Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, - { url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" }, - { url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, +version = "0.14.0" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/c1/0cddc6eb17d4c53a99840953f95dd3accdc5cfc7a337b0e9b26476276be9/jiter-0.14.0.tar.gz", hash = "sha256:e8a39e66dac7153cf3f964a12aad515afa8d74938ec5cc0018adcdae5367c79e", size = 165725, upload-time = "2026-04-10T14:28:42.01Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/68/7390a418f10897da93b158f2d5a8bd0bcd73a0f9ec3bb36917085bb759ef/jiter-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb2ce3a7bc331256dfb14cefc34832366bb28a9aca81deaf43bbf2a5659e607", size = 316295, upload-time = "2026-04-10T14:26:24.887Z" }, + { url = "https://files.pythonhosted.org/packages/60/a0/5854ac00ff63551c52c6c89534ec6aba4b93474e7924d64e860b1c94165b/jiter-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5252a7ca23785cef5d02d4ece6077a1b556a410c591b379f82091c3001e14844", size = 315898, upload-time = "2026-04-10T14:26:26.601Z" }, + { url = "https://files.pythonhosted.org/packages/41/a1/4f44832650a16b18e8391f1bf1d6ca4909bc738351826bcc198bba4357f4/jiter-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c409578cbd77c338975670ada777add4efd53379667edf0aceea730cabede6fb", size = 343730, upload-time = "2026-04-10T14:26:28.326Z" }, + { url = "https://files.pythonhosted.org/packages/48/64/a329e9d469f86307203594b1707e11ae51c3348d03bfd514a5f997870012/jiter-0.14.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ede4331a1899d604463369c730dbb961ffdc5312bc7f16c41c2896415b1304a", size = 370102, upload-time = "2026-04-10T14:26:30.089Z" }, + { url = "https://files.pythonhosted.org/packages/94/c1/5e3dfc59635aa4d4c7bd20a820ac1d09b8ed851568356802cf1c08edb3cf/jiter-0.14.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92cd8b6025981a041f5310430310b55b25ca593972c16407af8837d3d7d2ca01", size = 461335, upload-time = "2026-04-10T14:26:31.911Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/1b/dd157009dbc058f7b00108f545ccb72a2d56461395c4fc7b9cfdccb00af4/jiter-0.14.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:351bf6eda4e3a7ceb876377840c702e9a3e4ecc4624dbfb2d6463c67ae52637d", size = 378536, upload-time = "2026-04-10T14:26:33.595Z" }, + { url = "https://files.pythonhosted.org/packages/91/78/256013667b7c10b8834f8e6e54cd3e562d4c6e34227a1596addccc05e38c/jiter-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1dcfbeb93d9ecd9ca128bbf8910120367777973fa193fb9a39c31237d8df165", size = 353859, upload-time = "2026-04-10T14:26:35.098Z" }, + { url = "https://files.pythonhosted.org/packages/de/d9/137d65ade9093a409fe80955ce60b12bb753722c986467aeda47faf450ad/jiter-0.14.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:ae039aaef8de3f8157ecc1fdd4d85043ac4f57538c245a0afaecb8321ec951c3", size = 357626, upload-time = "2026-04-10T14:26:36.685Z" }, + { url = "https://files.pythonhosted.org/packages/2e/48/76750835b87029342727c1a268bea8878ab988caf81ee4e7b880900eeb5a/jiter-0.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7d9d51eb96c82a9652933bd769fe6de66877d6eb2b2440e281f2938c51b5643e", size = 393172, upload-time = "2026-04-10T14:26:38.097Z" }, + { url = "https://files.pythonhosted.org/packages/a6/60/456c4e81d5c8045279aefe60e9e483be08793828800a4e64add8fdde7f2a/jiter-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d824ca4148b705970bf4e120924a212fdfca9859a73e42bd7889a63a4ea6bb98", size = 520300, upload-time = "2026-04-10T14:26:39.532Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/2020e0984c235f678dced38fe4eec3058cf528e6af36ebf969b410305941/jiter-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff3a6465b3a0f54b1a430f45c3c0ba7d61ceb45cbc3e33f9e1a7f638d690baf3", size = 553059, upload-time = "2026-04-10T14:26:40.991Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/32/e2d298e1a22a4bbe6062136d1c7192db7dba003a6975e51d9a9eecabc4c2/jiter-0.14.0-cp312-cp312-win32.whl", hash = "sha256:5dec7c0a3e98d2a3f8a2e67382d0d7c3ac60c69103a4b271da889b4e8bb1e129", size = 206030, upload-time = "2026-04-10T14:26:42.517Z" }, + { url = "https://files.pythonhosted.org/packages/36/ac/96369141b3d8a4a8e4590e983085efe1c436f35c0cda940dd76d942e3e40/jiter-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:fc7e37b4b8bc7e80a63ad6cfa5fc11fab27dbfea4cc4ae644b1ab3f273dc348f", size = 201603, upload-time = "2026-04-10T14:26:44.328Z" }, + { url = "https://files.pythonhosted.org/packages/01/c3/75d847f264647017d7e3052bbcc8b1e24b95fa139c320c5f5066fa7a0bdd/jiter-0.14.0-cp312-cp312-win_arm64.whl", hash = "sha256:ee4a72f12847ef29b072aee9ad5474041ab2924106bdca9fcf5d7d965853e057", size = 191525, upload-time = "2026-04-10T14:26:46Z" }, + { url = "https://files.pythonhosted.org/packages/97/2a/09f70020898507a89279659a1afe3364d57fc1b2c89949081975d135f6f5/jiter-0.14.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:af72f204cf4d44258e5b4c1745130ac45ddab0e71a06333b01de660ab4187a94", size = 315502, upload-time = "2026-04-10T14:26:47.697Z" }, + { url = "https://files.pythonhosted.org/packages/d6/be/080c96a45cd74f9fce5db4fd68510b88087fb37ffe2541ff73c12db92535/jiter-0.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4b77da71f6e819be5fbcec11a453fde5b1d0267ef6ed487e2a392fd8e14e4e3a", size = 314870, upload-time = "2026-04-10T14:26:49.149Z" }, + { url = "https://files.pythonhosted.org/packages/7d/5e/2d0fee155826a968a832cc32438de5e2a193292c8721ca70d0b53e58245b/jiter-0.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f4ea612fe8b84b8b04e51d0e78029ecf3466348e25973f953de6e6a59aa4c1", size = 343406, upload-time = "2026-04-10T14:26:50.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/70/af/bf9ee0d3a4f8dc0d679fc1337f874fe60cdbf841ebbb304b374e1c9aaceb/jiter-0.14.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62fe2451f8fcc0240261e6a4df18ecbcd58327857e61e625b2393ea3b468aac9", size = 369415, upload-time = "2026-04-10T14:26:52.188Z" }, + { url = "https://files.pythonhosted.org/packages/0f/83/8e8561eadba31f4d3948a5b712fb0447ec71c3560b57a855449e7b8ddc98/jiter-0.14.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6112f26f5afc75bcb475787d29da3aa92f9d09c7858f632f4be6ffe607be82e9", size = 461456, upload-time = "2026-04-10T14:26:53.611Z" }, + { url = "https://files.pythonhosted.org/packages/f6/c9/c5299e826a5fe6108d172b344033f61c69b1bb979dd8d9ddd4278a160971/jiter-0.14.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:215a6cb8fb7dc702aa35d475cc00ddc7f970e5c0b1417fb4b4ac5d82fa2a29db", size = 378488, upload-time = "2026-04-10T14:26:55.211Z" }, + { url = "https://files.pythonhosted.org/packages/5d/37/c16d9d15c0a471b8644b1abe3c82668092a707d9bedcf076f24ff2e380cd/jiter-0.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ab96a30fb3cb2c7e0cd33f7616c8860da5f5674438988a54ac717caccdbaa", size = 353242, upload-time = "2026-04-10T14:26:56.705Z" }, + { url = "https://files.pythonhosted.org/packages/58/ea/8050cb0dc654e728e1bfacbc0c640772f2181af5dedd13ae70145743a439/jiter-0.14.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:3a99c1387b1f2928f799a9de899193484d66206a50e98233b6b088a7f0c1edb2", size = 356823, upload-time = "2026-04-10T14:26:58.281Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/cf71506d270e5f84d97326bf220e47aed9b95e9a4a060758fb07772170ab/jiter-0.14.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ab18d11074485438695f8d34a1b6da61db9754248f96d51341956607a8f39985", size = 392564, upload-time = "2026-04-10T14:27:00.018Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/cc/8c6c74a3efb5bd671bfd14f51e8a73375464ca914b1551bc3b40e26ac2c9/jiter-0.14.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:801028dcfc26ac0895e4964cbc0fd62c73be9fd4a7d7b1aaf6e5790033a719b7", size = 520322, upload-time = "2026-04-10T14:27:01.664Z" }, + { url = "https://files.pythonhosted.org/packages/41/24/68d7b883ec959884ddf00d019b2e0e82ba81b167e1253684fa90519ce33c/jiter-0.14.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ad425b087aafb4a1c7e1e98a279200743b9aaf30c3e0ba723aec93f061bd9bc8", size = 552619, upload-time = "2026-04-10T14:27:03.316Z" }, + { url = "https://files.pythonhosted.org/packages/b6/89/b1a0985223bbf3150ff9e8f46f98fc9360c1de94f48abe271bbe1b465682/jiter-0.14.0-cp313-cp313-win32.whl", hash = "sha256:882bcb9b334318e233950b8be366fe5f92c86b66a7e449e76975dfd6d776a01f", size = 205699, upload-time = "2026-04-10T14:27:04.662Z" }, + { url = "https://files.pythonhosted.org/packages/4c/19/3f339a5a7f14a11730e67f6be34f9d5105751d547b615ef593fa122a5ded/jiter-0.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:9b8c571a5dba09b98bd3462b5a53f27209a5cbbe85670391692ede71974e979f", size = 201323, upload-time = "2026-04-10T14:27:06.139Z" }, + { url = "https://files.pythonhosted.org/packages/50/56/752dd89c84be0e022a8ea3720bcfa0a8431db79a962578544812ce061739/jiter-0.14.0-cp313-cp313-win_arm64.whl", hash = "sha256:34f19dcc35cb1abe7c369b3756babf8c7f04595c0807a848df8f26ef8298ef92", size = 191099, upload-time = "2026-04-10T14:27:07.564Z" }, + { url = "https://files.pythonhosted.org/packages/91/28/292916f354f25a1fe8cf2c918d1415c699a4a659ae00be0430e1c5d9ffea/jiter-0.14.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e89bcd7d426a75bb4952c696b267075790d854a07aad4c9894551a82c5b574ab", size = 320880, upload-time = "2026-04-10T14:27:09.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/c7/b002a7d8b8957ac3d469bd59c18ef4b1595a5216ae0de639a287b9816023/jiter-0.14.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b25beaa0d4447ea8c7ae0c18c688905d34840d7d0b937f2f7bdd52162c98a40", size = 346563, upload-time = "2026-04-10T14:27:11.287Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3b/f8d07580d8706021d255a6356b8fab13ee4c869412995550ce6ed4ddf97d/jiter-0.14.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:651a8758dd413c51e3b7f6557cdc6921faf70b14106f45f969f091f5cda990ea", size = 357928, upload-time = "2026-04-10T14:27:12.729Z" }, + { url = "https://files.pythonhosted.org/packages/47/5b/ac1a974da29e35507230383110ffec59998b290a8732585d04e19a9eb5ba/jiter-0.14.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e1a7eead856a5038a8d291f1447176ab0b525c77a279a058121b5fccee257f6f", size = 203519, upload-time = "2026-04-10T14:27:14.125Z" }, + { url = "https://files.pythonhosted.org/packages/96/6d/9fc8433d667d2454271378a79747d8c76c10b51b482b454e6190e511f244/jiter-0.14.0-cp313-cp313t-win_arm64.whl", hash = "sha256:2e692633a12cda97e352fdcd1c4acc971b1c28707e1e33aeef782b0cbf051975", size = 190113, upload-time = "2026-04-10T14:27:16.638Z" }, + { url = "https://files.pythonhosted.org/packages/4f/1e/354ed92461b165bd581f9ef5150971a572c873ec3b68a916d5aa91da3cc2/jiter-0.14.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:6f396837fc7577871ca8c12edaf239ed9ccef3bbe39904ae9b8b63ce0a48b140", size = 315277, upload-time = "2026-04-10T14:27:18.109Z" }, + { url = "https://files.pythonhosted.org/packages/a6/95/8c7c7028aa8636ac21b7a55faef3e34215e6ed0cbf5ae58258427f621aa3/jiter-0.14.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a4d50ea3d8ba4176f79754333bd35f1bbcd28e91adc13eb9b7ca91bc52a6cef9", size = 315923, upload-time = "2026-04-10T14:27:19.603Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/40/e2a852a44c4a089f2681a16611b7ce113224a80fd8504c46d78491b47220/jiter-0.14.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce17f8a050447d1b4153bda4fb7d26e6a9e74eb4f4a41913f30934c5075bf615", size = 344943, upload-time = "2026-04-10T14:27:21.262Z" }, + { url = "https://files.pythonhosted.org/packages/fc/1f/670f92adee1e9895eac41e8a4d623b6da68c4d46249d8b556b60b63f949e/jiter-0.14.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4f1c4b125e1652aefbc2e2c1617b60a160ab789d180e3d423c41439e5f32850", size = 369725, upload-time = "2026-04-10T14:27:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/01/2f/541c9ba567d05de1c4874a0f8f8c5e3fd78e2b874266623da9a775cf46e0/jiter-0.14.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be808176a6a3a14321d18c603f2d40741858a7c4fc982f83232842689fe86dd9", size = 461210, upload-time = "2026-04-10T14:27:24.315Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a9/c31cbec09627e0d5de7aeaec7690dba03e090caa808fefd8133137cf45bc/jiter-0.14.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26679d58ba816f88c3849306dd58cb863a90a1cf352cdd4ef67e30ccf8a77994", size = 380002, upload-time = "2026-04-10T14:27:26.155Z" }, + { url = "https://files.pythonhosted.org/packages/50/02/3c05c1666c41904a2f607475a73e7a4763d1cbde2d18229c4f85b22dc253/jiter-0.14.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80381f5a19af8fa9aef743f080e34f6b25ebd89656475f8cf0470ec6157052aa", size = 354678, upload-time = "2026-04-10T14:27:27.701Z" }, + { url = "https://files.pythonhosted.org/packages/7d/97/e15b33545c2b13518f560d695f974b9891b311641bdcf178d63177e8801e/jiter-0.14.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:004df5fdb8ecbd6d99f3227df18ba1a259254c4359736a2e6f036c944e02d7c5", size = 358920, upload-time = "2026-04-10T14:27:29.256Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/d2/8b1461def6b96ba44530df20d07ef7a1c7da22f3f9bf1727e2d611077bf1/jiter-0.14.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cff5708f7ed0fa098f2b53446c6fa74c48469118e5cd7497b4f1cd569ab06928", size = 394512, upload-time = "2026-04-10T14:27:31.344Z" }, + { url = "https://files.pythonhosted.org/packages/e3/88/837566dd6ed6e452e8d3205355afd484ce44b2533edfa4ed73a298ea893e/jiter-0.14.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:2492e5f06c36a976d25c7cc347a60e26d5470178d44cde1b9b75e60b4e519f28", size = 521120, upload-time = "2026-04-10T14:27:33.299Z" }, + { url = "https://files.pythonhosted.org/packages/89/6b/b00b45c4d1b4c031777fe161d620b755b5b02cdade1e316dcb46e4471d63/jiter-0.14.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:7609cfbe3a03d37bfdbf5052012d5a879e72b83168a363deae7b3a26564d57de", size = 553668, upload-time = "2026-04-10T14:27:34.868Z" }, + { url = "https://files.pythonhosted.org/packages/ad/d8/6fe5b42011d19397433d345716eac16728ac241862a2aac9c91923c7509a/jiter-0.14.0-cp314-cp314-win32.whl", hash = "sha256:7282342d32e357543565286b6450378c3cd402eea333fc1ebe146f1fabb306fc", size = 207001, upload-time = "2026-04-10T14:27:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/e5/43/5c2e08da1efad5e410f0eaaabeadd954812612c33fbbd8fd5328b489139d/jiter-0.14.0-cp314-cp314-win_amd64.whl", hash = "sha256:bd77945f38866a448e73b0b7637366afa814d4617790ecd88a18ca74377e6c02", size = 202187, upload-time = "2026-04-10T14:27:38Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1f/6e39ac0b4cdfa23e606af5b245df5f9adaa76f35e0c5096790da430ca506/jiter-0.14.0-cp314-cp314-win_arm64.whl", hash = "sha256:f2d4c61da0821ee42e0cdf5489da60a6d074306313a377c2b35af464955a3611", size = 192257, upload-time = "2026-04-10T14:27:39.504Z" }, + { url = "https://files.pythonhosted.org/packages/05/57/7dbc0ffbbb5176a27e3518716608aa464aee2e2887dc938f0b900a120449/jiter-0.14.0-cp314-cp314t-macosx_11_0_arm64.whl", 
hash = "sha256:1bf7ff85517dd2f20a5750081d2b75083c1b269cf75afc7511bdf1f9548beb3b", size = 323441, upload-time = "2026-04-10T14:27:41.039Z" }, + { url = "https://files.pythonhosted.org/packages/83/6e/7b3314398d8983f06b557aa21b670511ec72d3b79a68ee5e4d9bff972286/jiter-0.14.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8ef8791c3e78d6c6b157c6d360fbb5c715bebb8113bc6a9303c5caff012754a", size = 348109, upload-time = "2026-04-10T14:27:42.552Z" }, + { url = "https://files.pythonhosted.org/packages/ae/4f/8dc674bcd7db6dba566de73c08c763c337058baff1dbeb34567045b27cdc/jiter-0.14.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e74663b8b10da1fe0f4e4703fd7980d24ad17174b6bb35d8498d6e3ebce2ae6a", size = 368328, upload-time = "2026-04-10T14:27:44.574Z" }, + { url = "https://files.pythonhosted.org/packages/3b/5f/188e09a1f20906f98bbdec44ed820e19f4e8eb8aff88b9d1a5a497587ff3/jiter-0.14.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1aca29ba52913f78362ec9c2da62f22cdc4c3083313403f90c15460979b84d9b", size = 463301, upload-time = "2026-04-10T14:27:46.717Z" }, + { url = "https://files.pythonhosted.org/packages/ac/f0/19046ef965ed8f349e8554775bb12ff4352f443fbe12b95d31f575891256/jiter-0.14.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b39b7d87a952b79949af5fef44d2544e58c21a28da7f1bae3ef166455c61746", size = 378891, upload-time = "2026-04-10T14:27:48.32Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c3/da43bd8431ee175695777ee78cf0e93eacbb47393ff493f18c45231b427d/jiter-0.14.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d918a68b26e9fab068c2b5453577ef04943ab2807b9a6275df2a812599a310", size = 360749, upload-time = "2026-04-10T14:27:49.88Z" }, + { url = "https://files.pythonhosted.org/packages/72/26/e054771be889707c6161dbdec9c23d33a9ec70945395d70f07cfea1e9a6f/jiter-0.14.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = 
"sha256:b08997c35aee1201c1a5361466a8fb9162d03ae7bf6568df70b6c859f1e654a4", size = 358526, upload-time = "2026-04-10T14:27:51.504Z" }, + { url = "https://files.pythonhosted.org/packages/c3/0f/7bea65ea2a6d91f2bf989ff11a18136644392bf2b0497a1fa50934c30a9c/jiter-0.14.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:260bf7ca20704d58d41f669e5e9fe7fe2fa72901a6b324e79056f5d52e9c9be2", size = 393926, upload-time = "2026-04-10T14:27:53.368Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/b1ff7d70deef61ac0b7c6c2f12d2ace950cdeecb4fdc94500a0926802857/jiter-0.14.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:37826e3df29e60f30a382f9294348d0238ef127f4b5d7f5f8da78b5b9e050560", size = 521052, upload-time = "2026-04-10T14:27:55.058Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7b/3b0649983cbaf15eda26a414b5b1982e910c67bd6f7b1b490f3cfc76896a/jiter-0.14.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:645be49c46f2900937ba0eaf871ad5183c96858c0af74b6becc7f4e367e36e06", size = 553716, upload-time = "2026-04-10T14:27:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/97/f8/33d78c83bd93ae0c0af05293a6660f88a1977caef39a6d72a84afab94ce0/jiter-0.14.0-cp314-cp314t-win32.whl", hash = "sha256:2f7877ed45118de283786178eceaf877110abacd04fde31efff3940ae9672674", size = 207957, upload-time = "2026-04-10T14:27:59.285Z" }, + { url = "https://files.pythonhosted.org/packages/d6/ac/2b760516c03e2227826d1f7025d89bf6bf6357a28fe75c2a2800873c50bf/jiter-0.14.0-cp314-cp314t-win_amd64.whl", hash = "sha256:14c0cb10337c49f5eafe8e7364daca5e29a020ea03580b8f8e6c597fed4e1588", size = 204690, upload-time = "2026-04-10T14:28:00.962Z" }, + { url = "https://files.pythonhosted.org/packages/dc/2e/a44c20c58aeed0355f2d326969a181696aeb551a25195f47563908a815be/jiter-0.14.0-cp314-cp314t-win_arm64.whl", hash = "sha256:5419d4aa2024961da9fe12a9cfe7484996735dca99e8e090b5c88595ef1951ff", size = 191338, upload-time = "2026-04-10T14:28:02.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/42/9042c3f3019de4adcb8c16591c325ec7255beea9fcd33a42a43f3b0b1000/jiter-0.14.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:fbd9e482663ca9d005d051330e4d2d8150bb208a209409c10f7e7dfdf7c49da9", size = 308810, upload-time = "2026-04-10T14:28:34.673Z" }, + { url = "https://files.pythonhosted.org/packages/60/cf/a7e19b308bd86bb04776803b1f01a5f9a287a4c55205f4708827ee487fbf/jiter-0.14.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:33a20d838b91ef376b3a56896d5b04e725c7df5bc4864cc6569cf046a8d73b6d", size = 308443, upload-time = "2026-04-10T14:28:36.658Z" }, + { url = "https://files.pythonhosted.org/packages/ca/44/e26ede3f0caeff93f222559cb0cc4ca68579f07d009d7b6010c5b586f9b1/jiter-0.14.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:432c4db5255d86a259efde91e55cb4c8d18c0521d844c9e2e7efcce3899fb016", size = 343039, upload-time = "2026-04-10T14:28:38.356Z" }, + { url = "https://files.pythonhosted.org/packages/da/e9/1f9ada30cef7b05e74bb06f52127e7a724976c225f46adb65c37b1dadfb6/jiter-0.14.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f00d94b281174144d6532a04b66a12cb866cbdc47c3af3bfe2973677f9861a", size = 349613, upload-time = "2026-04-10T14:28:40.066Z" }, ] [[package]] @@ -760,7 +1015,7 @@ wheels = [ [[package]] name = "logfire" -version = "4.17.0" +version = "4.32.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "executing" }, @@ -771,18 +1026,18 @@ dependencies = [ { name = "rich" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/74/88e8fec6e3fe247a7cdaeba777cbf7aced11de33db423693690e20ab8356/logfire-4.17.0.tar.gz", hash = "sha256:693d47d6b8b0a8f9fd8112d958eb9e3ae00fe5d323e1eee468e4bc6379bb2c4f", size = 558897, upload-time = "2026-01-07T10:52:16.148Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/98/64/f927d4f9de1f1371047b9016adba1ec2e08258301708d548d41f86f27772/logfire-4.32.0.tar.gz", hash = "sha256:f1dc9d756a4b28f0483645244aaf3ea8535b8e2ae5a1068442a968ca0c746304", size = 1088575, upload-time = "2026-04-10T19:36:54.172Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/68/c49baec77c56dcf5698daaa66a097523063d55b2feaecc968000518521a1/logfire-4.17.0-py3-none-any.whl", hash = "sha256:1ed1064a4126f48503f7832ce6039425dff70128d145552d7981ac9ce2151b17", size = 232456, upload-time = "2026-01-07T10:52:13.394Z" }, + { url = "https://files.pythonhosted.org/packages/02/33/81b13e1f2044b5fe0112068a2494526db9cfdf784030a2ea57688279360a/logfire-4.32.0-py3-none-any.whl", hash = "sha256:d9cff51c3c093c4161ece87a65e6ac6e2d862258b62494c30d93d713e9858758", size = 312412, upload-time = "2026-04-10T19:36:50.97Z" }, ] [[package]] name = "logfire-api" -version = "4.17.0" +version = "4.32.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8a/2a/76d8fbafa881cb03d5ad6e1d67d537e8c308ae7145812b8891f7b8751224/logfire_api-4.17.0.tar.gz", hash = "sha256:4647dad05146a68af441d59a7746a966df4c2581b316616f1210f8cf74931353", size = 58305, upload-time = "2026-01-07T10:52:17.768Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/17/7a50c55077e50b088e056a90e0754836ee6f1cb31ba6a2cd7f4282afd70b/logfire_api-4.32.0.tar.gz", hash = "sha256:aae7d7f1e38d04c6fa9449b15b18cc77336474feae56afc507e6f053aa1afb83", size = 78813, upload-time = "2026-04-10T19:36:55.69Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/bc/3844e103dca998dcc195d6ef09e0f29d9000bac870117db1dd59a29bfeef/logfire_api-4.17.0-py3-none-any.whl", hash = "sha256:80a4b79cd9918934cdf2043d944cfb04182708178d846273484d47f3619a5a39", size = 96146, upload-time = "2026-01-07T10:52:15.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/56/fe8d8aa60e796e059992fb7359ec5dda4ef72db4fccfbd362a2ee0595ec1/logfire_api-4.32.0-py3-none-any.whl", hash = "sha256:062526b31ca5e4bde5455bd5230bfb713df23189aedb370c8c47c6ed8ec02a37", size = 124427, upload-time = "2026-04-10T19:36:52.695Z" }, ] [[package]] @@ -799,7 +1054,7 @@ wheels = [ [[package]] name = "mcp" -version = "1.25.0" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -817,9 +1072,9 @@ dependencies = [ { name = "typing-inspection" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/eb/c0cfc62075dc6e1ec1c64d352ae09ac051d9334311ed226f1f425312848a/mcp-1.27.0.tar.gz", hash = "sha256:d3dc35a7eec0d458c1da4976a48f982097ddaab87e278c5511d5a4a56e852b83", size = 607509, upload-time = "2026-04-02T14:48:08.88Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" }, + { url = "https://files.pythonhosted.org/packages/9c/46/f6b4ad632c67ef35209a66127e4bddc95759649dd595f71f13fba11bdf9a/mcp-1.27.0-py3-none-any.whl", hash = "sha256:5ce1fa81614958e267b21fb2aa34e0aea8e2c6ede60d52aba45fd47246b4d741", size = 215967, upload-time = "2026-04-02T14:48:07.24Z" }, ] [package.optional-dependencies] @@ -837,18 +1092,111 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = 
"sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "microsoft-kiota-abstractions" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, + { name = "std-uritemplate" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/28/d22b26bc7b6d6947250de92f87d536d7e271de638660c9571821efac3302/microsoft_kiota_abstractions-1.10.1.tar.gz", hash = "sha256:ff922f6ac7e4a538d253e5bacb18f5c8c837d0273fb436eec2f0500c70230d96", size = 24465, upload-time = "2026-04-08T16:16:53.598Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/37/83/a099731bb11df9f3e2c39364a94579523aaa20c315a89ee69d8b7c851436/microsoft_kiota_abstractions-1.10.1-py3-none-any.whl", hash = "sha256:31d3f0581884eb221899f355834d86ceba6289b4b9df23e63d01f02aac6c4d0b", size = 44456, upload-time = "2026-04-08T16:16:54.646Z" }, +] + +[[package]] +name = "microsoft-kiota-authentication-azure" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "azure-core" }, + { name = "microsoft-kiota-abstractions" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/b6/8bccdf62dedae77ad2bc77f27292880420d5190cea163d77312ee8e69551/microsoft_kiota_authentication_azure-1.10.1.tar.gz", hash = "sha256:7de150868dab399b4ac36cb17a0ae726ff616079cee9b0a72e7d5e06f5c652e9", size = 4992, upload-time = "2026-04-08T16:17:05.891Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/99/325df634219d113d2418c76883f5cf9debf0ebb1ff04fb04438f7bc56284/microsoft_kiota_authentication_azure-1.10.1-py3-none-any.whl", hash = "sha256:0f82de6dbfc55d3b1df5c42d5c82631014f9db9c45ba0d0a2cdf13853f78649d", size = 6953, upload-time = "2026-04-08T16:17:06.548Z" }, +] + +[[package]] 
+name = "microsoft-kiota-http" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "microsoft-kiota-abstractions" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/2c/564a5cd50eb90f05bad0c970f832ad71d382bbe9003f0272c859492a01f7/microsoft_kiota_http-1.10.1.tar.gz", hash = "sha256:ee8eec8e35c467a22aedbe5f9b6699bc79411cc8f207705044e58ced22c588da", size = 21487, upload-time = "2026-04-08T16:17:15.585Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/ea/4ee59599c57561663d9f4d44e5de674f77ad1f7432242aba042768403c31/microsoft_kiota_http-1.10.1-py3-none-any.whl", hash = "sha256:77944e25594a65cced34702fa953bfd631eec1492ae4bef3d398eabc4ed8a6bc", size = 31951, upload-time = "2026-04-08T16:17:16.319Z" }, +] + +[[package]] +name = "microsoft-kiota-serialization-form" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "microsoft-kiota-abstractions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/ea/b00db31cebc39c2588319decc4bbca947873861711ba5ce83b74f210b509/microsoft_kiota_serialization_form-1.10.1.tar.gz", hash = "sha256:0bf8415b22998cdf81bf062b92b24e550b0c7874b0d2b52264ec6eb6f7caf76b", size = 9248, upload-time = "2026-04-08T16:17:25.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/a4/67f7450b38837b361b43099457f85f60730adc729dd6cc21afe6e86ace95/microsoft_kiota_serialization_form-1.10.1-py3-none-any.whl", hash = "sha256:232c29eb43c4bf66773a80912fa3e1015d53a88ac6b10f48f18498f4626211bd", size = 10874, upload-time = "2026-04-08T16:17:25.835Z" }, +] + +[[package]] +name = "microsoft-kiota-serialization-json" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "microsoft-kiota-abstractions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/7a/ea/36cee5e86b072911da8a4f8b178d101779507381ddb52b34f6e175e2686a/microsoft_kiota_serialization_json-1.10.1.tar.gz", hash = "sha256:71792246fd1f28f73ffb5d916a63763062f84055f197d282ed59c37181ba09c9", size = 9915, upload-time = "2026-04-08T16:17:34.568Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/c6/39c3c653dee9dc36b879f05a1ad270aa218fd7434ec866b3e0b8942f56e9/microsoft_kiota_serialization_json-1.10.1-py3-none-any.whl", hash = "sha256:42b56b49afb412e77dbcdd825660723f1a9355fb438f3e2dcb7a161670b17499", size = 11600, upload-time = "2026-04-08T16:17:35.274Z" }, +] + +[[package]] +name = "microsoft-kiota-serialization-multipart" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "microsoft-kiota-abstractions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/82/bb8d50224358e35970f6c038825e5888d9749ae07e630b34276064739580/microsoft_kiota_serialization_multipart-1.10.1.tar.gz", hash = "sha256:c6adcf3eebc822e340008baac39d4e6bec770b77ce5c77259ba5d23e744966bf", size = 5158, upload-time = "2026-04-08T16:18:02.448Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/ce/afd8d4f81c08a159f093f1dbc42ffa3b0b484cafe6e303812bf90ece3b65/microsoft_kiota_serialization_multipart-1.10.1-py3-none-any.whl", hash = "sha256:81f49060a274026227ebf30c04aac0d7022d2ee18cee9fb2149ec308333bf903", size = 6699, upload-time = "2026-04-08T16:18:04.064Z" }, +] + +[[package]] +name = "microsoft-kiota-serialization-text" +version = "1.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "microsoft-kiota-abstractions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/d0/a3f5087c9eab68c581e607287813498ca4c0f324c1ded31251aa9a2ed0df/microsoft_kiota_serialization_text-1.10.1.tar.gz", hash = "sha256:bb2502cf09b740345c4b221d1a38ffb14667a8dc8e572092de1b3ffd80f47732", size = 7316, upload-time = "2026-04-08T16:17:44.312Z" 
} +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/e9/330603879b363b2bb6ffed81e43ab9c619c6d39d67f285be0fc5b4e6913e/microsoft_kiota_serialization_text-1.10.1-py3-none-any.whl", hash = "sha256:43e3d4e8ae4866440c031ae5dbf6d63a00a38ec62a61ac7eafc047de31b20269", size = 8887, upload-time = "2026-04-08T16:17:44.947Z" }, +] + [[package]] name = "msal" -version = "1.34.0" +version = "1.36.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "pyjwt", extra = ["crypto"] }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/cb/b02b0f748ac668922364ccb3c3bff5b71628a05f5adfec2ba2a5c3031483/msal-1.36.0.tar.gz", hash = "sha256:3f6a4af2b036b476a4215111c4297b4e6e236ed186cd804faefba23e4990978b", size = 174217, upload-time = "2026-04-09T10:20:33.525Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, + { url = "https://files.pythonhosted.org/packages/2a/d3/414d1f0a5f6f4fe5313c2b002c54e78a3332970feb3f5fed14237aa17064/msal-1.36.0-py3-none-any.whl", hash = "sha256:36ecac30e2ff4322d956029aabce3c82301c29f0acb1ad89b94edcabb0e58ec4", size = 121547, upload-time = "2026-04-09T10:20:32.336Z" }, ] [[package]] @@ -863,6 +1211,137 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = 
"sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, ] +[[package]] +name = "msgraph-core" +version = "1.3.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "microsoft-kiota-abstractions" }, + { name = "microsoft-kiota-authentication-azure" }, + { name = "microsoft-kiota-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/4e/123f9530ec43b306c597bb830c62bedab830ffa76e0edf33ea88a26f756e/msgraph_core-1.3.8.tar.gz", hash = "sha256:6e883f9d4c4ad57501234749e07b010478c1a5f19550ef4cf005bbcac4a63ae7", size = 25506, upload-time = "2025-09-11T22:46:57.267Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/4d/01432f60727ae452787014cad0d5bc9e035c6e11a670f12c23f7fc926d90/msgraph_core-1.3.8-py3-none-any.whl", hash = "sha256:86d83edcf62119946f201d13b7e857c947ef67addb088883940197081de85bea", size = 34473, upload-time = "2025-09-11T22:46:56.026Z" }, +] + +[[package]] +name = "msgraph-sdk" +version = "1.55.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "azure-identity" }, + { name = "microsoft-kiota-serialization-form" }, + { name = "microsoft-kiota-serialization-json" }, + { name = "microsoft-kiota-serialization-multipart" }, + { name = "microsoft-kiota-serialization-text" }, + { name = "msgraph-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/44/0b5a188addf6341b3da10dd207e444417de255f7c1651902ba72016a2843/msgraph_sdk-1.55.0.tar.gz", hash = "sha256:6df691a31954a050d26b8a678968017e157d940fb377f2a8a4e17a9741b98756", size = 6295669, upload-time = "2026-02-20T00:32:29.378Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/a8/de807e62f8ff93003b573aa243cdcee2da2c0618b42efbc9a8e61aa7300d/msgraph_sdk-1.55.0-py3-none-any.whl", hash = "sha256:c8e68ebc4b88af5111de312e7fa910a4e76ddf48a4534feadb1fb8a411c48cfc", size = 25758742, 
upload-time = "2026-02-20T00:30:40.039Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = "2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = "https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = "2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = "2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, 
+ { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = "2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = "2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = 
"2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = "https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = "2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = "2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, 
+ { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = "https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = "2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = "2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = 
"2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -883,63 +1362,63 @@ wheels = [ [[package]] name = "numpy" -version = "2.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a4/7a/6a3d14e205d292b738db449d0de649b373a59edb0d0b4493821d0a3e8718/numpy-2.4.0.tar.gz", hash = "sha256:6e504f7b16118198f138ef31ba24d985b124c2c469fe8467007cf30fd992f934", size = 20685720, upload-time = "2025-12-20T16:18:19.023Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/ff/f6400ffec95de41c74b8e73df32e3fff1830633193a7b1e409be7fb1bb8c/numpy-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a8b6bb8369abefb8bd1801b054ad50e02b3275c8614dc6e5b0373c305291037", size = 16653117, upload-time = "2025-12-20T16:16:06.709Z" }, - { url = "https://files.pythonhosted.org/packages/fd/28/6c23e97450035072e8d830a3c411bf1abd1f42c611ff9d29e3d8f55c6252/numpy-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e284ca13d5a8367e43734148622caf0b261b275673823593e3e3634a6490f83", size = 12369711, 
upload-time = "2025-12-20T16:16:08.758Z" }, - { url = "https://files.pythonhosted.org/packages/bc/af/acbef97b630ab1bb45e6a7d01d1452e4251aa88ce680ac36e56c272120ec/numpy-2.4.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:49ff32b09f5aa0cd30a20c2b39db3e669c845589f2b7fc910365210887e39344", size = 5198355, upload-time = "2025-12-20T16:16:10.902Z" }, - { url = "https://files.pythonhosted.org/packages/c1/c8/4e0d436b66b826f2e53330adaa6311f5cac9871a5b5c31ad773b27f25a74/numpy-2.4.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:36cbfb13c152b1c7c184ddac43765db8ad672567e7bafff2cc755a09917ed2e6", size = 6545298, upload-time = "2025-12-20T16:16:12.607Z" }, - { url = "https://files.pythonhosted.org/packages/ef/27/e1f5d144ab54eac34875e79037011d511ac57b21b220063310cb96c80fbc/numpy-2.4.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35ddc8f4914466e6fc954c76527aa91aa763682a4f6d73249ef20b418fe6effb", size = 14398387, upload-time = "2025-12-20T16:16:14.257Z" }, - { url = "https://files.pythonhosted.org/packages/67/64/4cb909dd5ab09a9a5d086eff9586e69e827b88a5585517386879474f4cf7/numpy-2.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc578891de1db95b2a35001b695451767b580bb45753717498213c5ff3c41d63", size = 16363091, upload-time = "2025-12-20T16:16:17.32Z" }, - { url = "https://files.pythonhosted.org/packages/9d/9c/8efe24577523ec6809261859737cf117b0eb6fdb655abdfdc81b2e468ce4/numpy-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98e81648e0b36e325ab67e46b5400a7a6d4a22b8a7c8e8bbfe20e7db7906bf95", size = 16176394, upload-time = "2025-12-20T16:16:19.524Z" }, - { url = "https://files.pythonhosted.org/packages/61/f0/1687441ece7b47a62e45a1f82015352c240765c707928edd8aef875d5951/numpy-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d57b5046c120561ba8fa8e4030fbb8b822f3063910fa901ffadf16e2b7128ad6", size = 18287378, upload-time = "2025-12-20T16:16:22.866Z" }, - { url = 
"https://files.pythonhosted.org/packages/d3/6f/f868765d44e6fc466467ed810ba9d8d6db1add7d4a748abfa2a4c99a3194/numpy-2.4.0-cp312-cp312-win32.whl", hash = "sha256:92190db305a6f48734d3982f2c60fa30d6b5ee9bff10f2887b930d7b40119f4c", size = 5955432, upload-time = "2025-12-20T16:16:25.06Z" }, - { url = "https://files.pythonhosted.org/packages/d4/b5/94c1e79fcbab38d1ca15e13777477b2914dd2d559b410f96949d6637b085/numpy-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:680060061adb2d74ce352628cb798cfdec399068aa7f07ba9fb818b2b3305f98", size = 12306201, upload-time = "2025-12-20T16:16:26.979Z" }, - { url = "https://files.pythonhosted.org/packages/70/09/c39dadf0b13bb0768cd29d6a3aaff1fb7c6905ac40e9aaeca26b1c086e06/numpy-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:39699233bc72dd482da1415dcb06076e32f60eddc796a796c5fb6c5efce94667", size = 10308234, upload-time = "2025-12-20T16:16:29.417Z" }, - { url = "https://files.pythonhosted.org/packages/a7/0d/853fd96372eda07c824d24adf02e8bc92bb3731b43a9b2a39161c3667cc4/numpy-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a152d86a3ae00ba5f47b3acf3b827509fd0b6cb7d3259665e63dafbad22a75ea", size = 16649088, upload-time = "2025-12-20T16:16:31.421Z" }, - { url = "https://files.pythonhosted.org/packages/e3/37/cc636f1f2a9f585434e20a3e6e63422f70bfe4f7f6698e941db52ea1ac9a/numpy-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:39b19251dec4de8ff8496cd0806cbe27bf0684f765abb1f4809554de93785f2d", size = 12364065, upload-time = "2025-12-20T16:16:33.491Z" }, - { url = "https://files.pythonhosted.org/packages/ed/69/0b78f37ca3690969beee54103ce5f6021709134e8020767e93ba691a72f1/numpy-2.4.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:009bd0ea12d3c784b6639a8457537016ce5172109e585338e11334f6a7bb88ee", size = 5192640, upload-time = "2025-12-20T16:16:35.636Z" }, - { url = "https://files.pythonhosted.org/packages/1d/2a/08569f8252abf590294dbb09a430543ec8f8cc710383abfb3e75cc73aeda/numpy-2.4.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = 
"sha256:5fe44e277225fd3dff6882d86d3d447205d43532c3627313d17e754fb3905a0e", size = 6541556, upload-time = "2025-12-20T16:16:37.276Z" }, - { url = "https://files.pythonhosted.org/packages/93/e9/a949885a4e177493d61519377952186b6cbfdf1d6002764c664ba28349b5/numpy-2.4.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f935c4493eda9069851058fa0d9e39dbf6286be690066509305e52912714dbb2", size = 14396562, upload-time = "2025-12-20T16:16:38.953Z" }, - { url = "https://files.pythonhosted.org/packages/99/98/9d4ad53b0e9ef901c2ef1d550d2136f5ac42d3fd2988390a6def32e23e48/numpy-2.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cfa5f29a695cb7438965e6c3e8d06e0416060cf0d709c1b1c1653a939bf5c2a", size = 16351719, upload-time = "2025-12-20T16:16:41.503Z" }, - { url = "https://files.pythonhosted.org/packages/28/de/5f3711a38341d6e8dd619f6353251a0cdd07f3d6d101a8fd46f4ef87f895/numpy-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba0cb30acd3ef11c94dc27fbfba68940652492bc107075e7ffe23057f9425681", size = 16176053, upload-time = "2025-12-20T16:16:44.552Z" }, - { url = "https://files.pythonhosted.org/packages/2a/5b/2a3753dc43916501b4183532e7ace862e13211042bceafa253afb5c71272/numpy-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60e8c196cd82cbbd4f130b5290007e13e6de3eca79f0d4d38014769d96a7c475", size = 18277859, upload-time = "2025-12-20T16:16:47.174Z" }, - { url = "https://files.pythonhosted.org/packages/2c/c5/a18bcdd07a941db3076ef489d036ab16d2bfc2eae0cf27e5a26e29189434/numpy-2.4.0-cp313-cp313-win32.whl", hash = "sha256:5f48cb3e88fbc294dc90e215d86fbaf1c852c63dbdb6c3a3e63f45c4b57f7344", size = 5953849, upload-time = "2025-12-20T16:16:49.554Z" }, - { url = "https://files.pythonhosted.org/packages/4f/f1/719010ff8061da6e8a26e1980cf090412d4f5f8060b31f0c45d77dd67a01/numpy-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:a899699294f28f7be8992853c0c60741f16ff199205e2e6cdca155762cbaa59d", size = 12302840, upload-time = 
"2025-12-20T16:16:51.227Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5a/b3d259083ed8b4d335270c76966cb6cf14a5d1b69e1a608994ac57a659e6/numpy-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:9198f447e1dc5647d07c9a6bbe2063cc0132728cc7175b39dbc796da5b54920d", size = 10308509, upload-time = "2025-12-20T16:16:53.313Z" }, - { url = "https://files.pythonhosted.org/packages/31/01/95edcffd1bb6c0633df4e808130545c4f07383ab629ac7e316fb44fff677/numpy-2.4.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74623f2ab5cc3f7c886add4f735d1031a1d2be4a4ae63c0546cfd74e7a31ddf6", size = 12491815, upload-time = "2025-12-20T16:16:55.496Z" }, - { url = "https://files.pythonhosted.org/packages/59/ea/5644b8baa92cc1c7163b4b4458c8679852733fa74ca49c942cfa82ded4e0/numpy-2.4.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:0804a8e4ab070d1d35496e65ffd3cf8114c136a2b81f61dfab0de4b218aacfd5", size = 5320321, upload-time = "2025-12-20T16:16:57.468Z" }, - { url = "https://files.pythonhosted.org/packages/26/4e/e10938106d70bc21319bd6a86ae726da37edc802ce35a3a71ecdf1fdfe7f/numpy-2.4.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:02a2038eb27f9443a8b266a66911e926566b5a6ffd1a689b588f7f35b81e7dc3", size = 6641635, upload-time = "2025-12-20T16:16:59.379Z" }, - { url = "https://files.pythonhosted.org/packages/b3/8d/a8828e3eaf5c0b4ab116924df82f24ce3416fa38d0674d8f708ddc6c8aac/numpy-2.4.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1889b3a3f47a7b5bee16bc25a2145bd7cb91897f815ce3499db64c7458b6d91d", size = 14456053, upload-time = "2025-12-20T16:17:01.768Z" }, - { url = "https://files.pythonhosted.org/packages/68/a1/17d97609d87d4520aa5ae2dcfb32305654550ac6a35effb946d303e594ce/numpy-2.4.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85eef4cb5625c47ee6425c58a3502555e10f45ee973da878ac8248ad58c136f3", size = 16401702, upload-time = "2025-12-20T16:17:04.235Z" }, - { url = 
"https://files.pythonhosted.org/packages/18/32/0f13c1b2d22bea1118356b8b963195446f3af124ed7a5adfa8fdecb1b6ca/numpy-2.4.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6dc8b7e2f4eb184b37655195f421836cfae6f58197b67e3ffc501f1333d993fa", size = 16242493, upload-time = "2025-12-20T16:17:06.856Z" }, - { url = "https://files.pythonhosted.org/packages/ae/23/48f21e3d309fbc137c068a1475358cbd3a901b3987dcfc97a029ab3068e2/numpy-2.4.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:44aba2f0cafd287871a495fb3163408b0bd25bbce135c6f621534a07f4f7875c", size = 18324222, upload-time = "2025-12-20T16:17:09.392Z" }, - { url = "https://files.pythonhosted.org/packages/ac/52/41f3d71296a3dcaa4f456aaa3c6fc8e745b43d0552b6bde56571bb4b4a0f/numpy-2.4.0-cp313-cp313t-win32.whl", hash = "sha256:20c115517513831860c573996e395707aa9fb691eb179200125c250e895fcd93", size = 6076216, upload-time = "2025-12-20T16:17:11.437Z" }, - { url = "https://files.pythonhosted.org/packages/35/ff/46fbfe60ab0710d2a2b16995f708750307d30eccbb4c38371ea9e986866e/numpy-2.4.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b48e35f4ab6f6a7597c46e301126ceba4c44cd3280e3750f85db48b082624fa4", size = 12444263, upload-time = "2025-12-20T16:17:13.182Z" }, - { url = "https://files.pythonhosted.org/packages/a3/e3/9189ab319c01d2ed556c932ccf55064c5d75bb5850d1df7a482ce0badead/numpy-2.4.0-cp313-cp313t-win_arm64.whl", hash = "sha256:4d1cfce39e511069b11e67cd0bd78ceff31443b7c9e5c04db73c7a19f572967c", size = 10378265, upload-time = "2025-12-20T16:17:15.211Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ed/52eac27de39d5e5a6c9aadabe672bc06f55e24a3d9010cd1183948055d76/numpy-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c95eb6db2884917d86cde0b4d4cf31adf485c8ec36bf8696dd66fa70de96f36b", size = 16647476, upload-time = "2025-12-20T16:17:17.671Z" }, - { url = "https://files.pythonhosted.org/packages/77/c0/990ce1b7fcd4e09aeaa574e2a0a839589e4b08b2ca68070f1acb1fea6736/numpy-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash 
= "sha256:65167da969cd1ec3a1df31cb221ca3a19a8aaa25370ecb17d428415e93c1935e", size = 12374563, upload-time = "2025-12-20T16:17:20.216Z" }, - { url = "https://files.pythonhosted.org/packages/37/7c/8c5e389c6ae8f5fd2277a988600d79e9625db3fff011a2d87ac80b881a4c/numpy-2.4.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3de19cfecd1465d0dcf8a5b5ea8b3155b42ed0b639dba4b71e323d74f2a3be5e", size = 5203107, upload-time = "2025-12-20T16:17:22.47Z" }, - { url = "https://files.pythonhosted.org/packages/e6/94/ca5b3bd6a8a70a5eec9a0b8dd7f980c1eff4b8a54970a9a7fef248ef564f/numpy-2.4.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6c05483c3136ac4c91b4e81903cb53a8707d316f488124d0398499a4f8e8ef51", size = 6538067, upload-time = "2025-12-20T16:17:24.001Z" }, - { url = "https://files.pythonhosted.org/packages/79/43/993eb7bb5be6761dde2b3a3a594d689cec83398e3f58f4758010f3b85727/numpy-2.4.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36667db4d6c1cea79c8930ab72fadfb4060feb4bfe724141cd4bd064d2e5f8ce", size = 14411926, upload-time = "2025-12-20T16:17:25.822Z" }, - { url = "https://files.pythonhosted.org/packages/03/75/d4c43b61de473912496317a854dac54f1efec3eeb158438da6884b70bb90/numpy-2.4.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9a818668b674047fd88c4cddada7ab8f1c298812783e8328e956b78dc4807f9f", size = 16354295, upload-time = "2025-12-20T16:17:28.308Z" }, - { url = "https://files.pythonhosted.org/packages/b8/0a/b54615b47ee8736a6461a4bb6749128dd3435c5a759d5663f11f0e9af4ac/numpy-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1ee32359fb7543b7b7bd0b2f46294db27e29e7bbdf70541e81b190836cd83ded", size = 16190242, upload-time = "2025-12-20T16:17:30.993Z" }, - { url = "https://files.pythonhosted.org/packages/98/ce/ea207769aacad6246525ec6c6bbd66a2bf56c72443dc10e2f90feed29290/numpy-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e493962256a38f58283de033d8af176c5c91c084ea30f15834f7545451c42059", size = 
18280875, upload-time = "2025-12-20T16:17:33.327Z" }, - { url = "https://files.pythonhosted.org/packages/17/ef/ec409437aa962ea372ed601c519a2b141701683ff028f894b7466f0ab42b/numpy-2.4.0-cp314-cp314-win32.whl", hash = "sha256:6bbaebf0d11567fa8926215ae731e1d58e6ec28a8a25235b8a47405d301332db", size = 6002530, upload-time = "2025-12-20T16:17:35.729Z" }, - { url = "https://files.pythonhosted.org/packages/5f/4a/5cb94c787a3ed1ac65e1271b968686521169a7b3ec0b6544bb3ca32960b0/numpy-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d857f55e7fdf7c38ab96c4558c95b97d1c685be6b05c249f5fdafcbd6f9899e", size = 12435890, upload-time = "2025-12-20T16:17:37.599Z" }, - { url = "https://files.pythonhosted.org/packages/48/a0/04b89db963af9de1104975e2544f30de89adbf75b9e75f7dd2599be12c79/numpy-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:bb50ce5fb202a26fd5404620e7ef820ad1ab3558b444cb0b55beb7ef66cd2d63", size = 10591892, upload-time = "2025-12-20T16:17:39.649Z" }, - { url = "https://files.pythonhosted.org/packages/53/e5/d74b5ccf6712c06c7a545025a6a71bfa03bdc7e0568b405b0d655232fd92/numpy-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:355354388cba60f2132df297e2d53053d4063f79077b67b481d21276d61fc4df", size = 12494312, upload-time = "2025-12-20T16:17:41.714Z" }, - { url = "https://files.pythonhosted.org/packages/c2/08/3ca9cc2ddf54dfee7ae9a6479c071092a228c68aef08252aa08dac2af002/numpy-2.4.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:1d8f9fde5f6dc1b6fc34df8162f3b3079365468703fee7f31d4e0cc8c63baed9", size = 5322862, upload-time = "2025-12-20T16:17:44.145Z" }, - { url = "https://files.pythonhosted.org/packages/87/74/0bb63a68394c0c1e52670cfff2e309afa41edbe11b3327d9af29e4383f34/numpy-2.4.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e0434aa22c821f44eeb4c650b81c7fbdd8c0122c6c4b5a576a76d5a35625ecd9", size = 6644986, upload-time = "2025-12-20T16:17:46.203Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/8f/9264d9bdbcf8236af2823623fe2f3981d740fc3461e2787e231d97c38c28/numpy-2.4.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40483b2f2d3ba7aad426443767ff5632ec3156ef09742b96913787d13c336471", size = 14457958, upload-time = "2025-12-20T16:17:48.017Z" }, - { url = "https://files.pythonhosted.org/packages/8c/d9/f9a69ae564bbc7236a35aa883319364ef5fd41f72aa320cc1cbe66148fe2/numpy-2.4.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6a7664ddd9746e20b7325351fe1a8408d0a2bf9c63b5e898290ddc8f09544", size = 16398394, upload-time = "2025-12-20T16:17:50.409Z" }, - { url = "https://files.pythonhosted.org/packages/34/c7/39241501408dde7f885d241a98caba5421061a2c6d2b2197ac5e3aa842d8/numpy-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ecb0019d44f4cdb50b676c5d0cb4b1eae8e15d1ed3d3e6639f986fc92b2ec52c", size = 16241044, upload-time = "2025-12-20T16:17:52.661Z" }, - { url = "https://files.pythonhosted.org/packages/7c/95/cae7effd90e065a95e59fe710eeee05d7328ed169776dfdd9f789e032125/numpy-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d0ffd9e2e4441c96a9c91ec1783285d80bf835b677853fc2770a89d50c1e48ac", size = 18321772, upload-time = "2025-12-20T16:17:54.947Z" }, - { url = "https://files.pythonhosted.org/packages/96/df/3c6c279accd2bfb968a76298e5b276310bd55d243df4fa8ac5816d79347d/numpy-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:77f0d13fa87036d7553bf81f0e1fe3ce68d14c9976c9851744e4d3e91127e95f", size = 6148320, upload-time = "2025-12-20T16:17:57.249Z" }, - { url = "https://files.pythonhosted.org/packages/92/8d/f23033cce252e7a75cae853d17f582e86534c46404dea1c8ee094a9d6d84/numpy-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b1f5b45829ac1848893f0ddf5cb326110604d6df96cdc255b0bf9edd154104d4", size = 12623460, upload-time = "2025-12-20T16:17:58.963Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/4f/1f8475907d1a7c4ef9020edf7f39ea2422ec896849245f00688e4b268a71/numpy-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:23a3e9d1a6f360267e8fbb38ba5db355a6a7e9be71d7fce7ab3125e88bb646c8", size = 10661799, upload-time = "2025-12-20T16:18:01.078Z" }, +version = "2.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/9f/b8cef5bffa569759033adda9481211426f12f53299629b410340795c2514/numpy-2.4.4.tar.gz", hash = "sha256:2d390634c5182175533585cc89f3608a4682ccb173cc9bb940b2881c8d6f8fa0", size = 20731587, upload-time = "2026-03-29T13:22:01.298Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/05/32396bec30fb2263770ee910142f49c1476d08e8ad41abf8403806b520ce/numpy-2.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15716cfef24d3a9762e3acdf87e27f58dc823d1348f765bbea6bef8c639bfa1b", size = 16689272, upload-time = "2026-03-29T13:18:49.223Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f3/a983d28637bfcd763a9c7aafdb6d5c0ebf3d487d1e1459ffdb57e2f01117/numpy-2.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23cbfd4c17357c81021f21540da84ee282b9c8fba38a03b7b9d09ba6b951421e", size = 14699573, upload-time = "2026-03-29T13:18:52.629Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fd/e5ecca1e78c05106d98028114f5c00d3eddb41207686b2b7de3e477b0e22/numpy-2.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b3b60bb7cba2c8c81837661c488637eee696f59a877788a396d33150c35d842", size = 5204782, upload-time = "2026-03-29T13:18:55.579Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/702a4594413c1a8632092beae8aba00f1d67947389369b3777aed783fdca/numpy-2.4.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e4a010c27ff6f210ff4c6ef34394cd61470d01014439b192ec22552ee867f2a8", size = 6552038, upload-time = "2026-03-29T13:18:57.769Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/37/eed308a8f56cba4d1fdf467a4fc67ef4ff4bf1c888f5fc980481890104b1/numpy-2.4.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9e75681b59ddaa5e659898085ae0eaea229d054f2ac0c7e563a62205a700121", size = 15670666, upload-time = "2026-03-29T13:19:00.341Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/0e3ecece05b7a7e87ab9fb587855548da437a061326fff64a223b6dcb78a/numpy-2.4.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81f4a14bee47aec54f883e0cad2d73986640c1590eb9bfaaba7ad17394481e6e", size = 16645480, upload-time = "2026-03-29T13:19:03.63Z" }, + { url = "https://files.pythonhosted.org/packages/34/49/f2312c154b82a286758ee2f1743336d50651f8b5195db18cdb63675ff649/numpy-2.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:62d6b0f03b694173f9fcb1fb317f7222fd0b0b103e784c6549f5e53a27718c44", size = 17020036, upload-time = "2026-03-29T13:19:07.428Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e9/736d17bd77f1b0ec4f9901aaec129c00d59f5d84d5e79bba540ef12c2330/numpy-2.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbc356aae7adf9e6336d336b9c8111d390a05df88f1805573ebb0807bd06fd1d", size = 18368643, upload-time = "2026-03-29T13:19:10.775Z" }, + { url = "https://files.pythonhosted.org/packages/63/f6/d417977c5f519b17c8a5c3bc9e8304b0908b0e21136fe43bf628a1343914/numpy-2.4.4-cp312-cp312-win32.whl", hash = "sha256:0d35aea54ad1d420c812bfa0385c71cd7cc5bcf7c65fed95fc2cd02fe8c79827", size = 5961117, upload-time = "2026-03-29T13:19:13.464Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5b/e1deebf88ff431b01b7406ca3583ab2bbb90972bbe1c568732e49c844f7e/numpy-2.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5f0362dc928a6ecd9db58868fca5e48485205e3855957bdedea308f8672ea4a", size = 12320584, upload-time = "2026-03-29T13:19:16.155Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/89/e4e856ac82a68c3ed64486a544977d0e7bdd18b8da75b78a577ca31c4395/numpy-2.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:846300f379b5b12cc769334464656bc882e0735d27d9726568bc932fdc49d5ec", size = 10221450, upload-time = "2026-03-29T13:19:18.994Z" }, + { url = "https://files.pythonhosted.org/packages/14/1d/d0a583ce4fefcc3308806a749a536c201ed6b5ad6e1322e227ee4848979d/numpy-2.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:08f2e31ed5e6f04b118e49821397f12767934cfdd12a1ce86a058f91e004ee50", size = 16684933, upload-time = "2026-03-29T13:19:22.47Z" }, + { url = "https://files.pythonhosted.org/packages/c1/62/2b7a48fbb745d344742c0277f01286dead15f3f68e4f359fbfcf7b48f70f/numpy-2.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e823b8b6edc81e747526f70f71a9c0a07ac4e7ad13020aa736bb7c9d67196115", size = 14694532, upload-time = "2026-03-29T13:19:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/e5/87/499737bfba066b4a3bebff24a8f1c5b2dee410b209bc6668c9be692580f0/numpy-2.4.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4a19d9dba1a76618dd86b164d608566f393f8ec6ac7c44f0cc879011c45e65af", size = 5199661, upload-time = "2026-03-29T13:19:28.31Z" }, + { url = "https://files.pythonhosted.org/packages/cd/da/464d551604320d1491bc345efed99b4b7034143a85787aab78d5691d5a0e/numpy-2.4.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d2a8490669bfe99a233298348acc2d824d496dee0e66e31b66a6022c2ad74a5c", size = 6547539, upload-time = "2026-03-29T13:19:30.97Z" }, + { url = "https://files.pythonhosted.org/packages/7d/90/8d23e3b0dafd024bf31bdec225b3bb5c2dbfa6912f8a53b8659f21216cbf/numpy-2.4.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45dbed2ab436a9e826e302fcdcbe9133f9b0006e5af7168afb8963a6520da103", size = 15668806, upload-time = "2026-03-29T13:19:33.887Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/73/a9d864e42a01896bb5974475438f16086be9ba1f0d19d0bb7a07427c4a8b/numpy-2.4.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c901b15172510173f5cb310eae652908340f8dede90fff9e3bf6c0d8dfd92f83", size = 16632682, upload-time = "2026-03-29T13:19:37.336Z" }, + { url = "https://files.pythonhosted.org/packages/34/fb/14570d65c3bde4e202a031210475ae9cde9b7686a2e7dc97ee67d2833b35/numpy-2.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:99d838547ace2c4aace6c4f76e879ddfe02bb58a80c1549928477862b7a6d6ed", size = 17019810, upload-time = "2026-03-29T13:19:40.963Z" }, + { url = "https://files.pythonhosted.org/packages/8a/77/2ba9d87081fd41f6d640c83f26fb7351e536b7ce6dd9061b6af5904e8e46/numpy-2.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0aec54fd785890ecca25a6003fd9a5aed47ad607bbac5cd64f836ad8666f4959", size = 18357394, upload-time = "2026-03-29T13:19:44.859Z" }, + { url = "https://files.pythonhosted.org/packages/a2/23/52666c9a41708b0853fa3b1a12c90da38c507a3074883823126d4e9d5b30/numpy-2.4.4-cp313-cp313-win32.whl", hash = "sha256:07077278157d02f65c43b1b26a3886bce886f95d20aabd11f87932750dfb14ed", size = 5959556, upload-time = "2026-03-29T13:19:47.661Z" }, + { url = "https://files.pythonhosted.org/packages/57/fb/48649b4971cde70d817cf97a2a2fdc0b4d8308569f1dd2f2611959d2e0cf/numpy-2.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:5c70f1cc1c4efbe316a572e2d8b9b9cc44e89b95f79ca3331553fbb63716e2bf", size = 12317311, upload-time = "2026-03-29T13:19:50.67Z" }, + { url = "https://files.pythonhosted.org/packages/ba/d8/11490cddd564eb4de97b4579ef6bfe6a736cc07e94c1598590ae25415e01/numpy-2.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:ef4059d6e5152fa1a39f888e344c73fdc926e1b2dd58c771d67b0acfbf2aa67d", size = 10222060, upload-time = "2026-03-29T13:19:54.229Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/5d/dab4339177a905aad3e2221c915b35202f1ec30d750dd2e5e9d9a72b804b/numpy-2.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4bbc7f303d125971f60ec0aaad5e12c62d0d2c925f0ab1273debd0e4ba37aba5", size = 14822302, upload-time = "2026-03-29T13:19:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/eb/e4/0564a65e7d3d97562ed6f9b0fd0fb0a6f559ee444092f105938b50043876/numpy-2.4.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:4d6d57903571f86180eb98f8f0c839fa9ebbfb031356d87f1361be91e433f5b7", size = 5327407, upload-time = "2026-03-29T13:20:00.601Z" }, + { url = "https://files.pythonhosted.org/packages/29/8d/35a3a6ce5ad371afa58b4700f1c820f8f279948cca32524e0a695b0ded83/numpy-2.4.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:4636de7fd195197b7535f231b5de9e4b36d2c440b6e566d2e4e4746e6af0ca93", size = 6647631, upload-time = "2026-03-29T13:20:02.855Z" }, + { url = "https://files.pythonhosted.org/packages/f4/da/477731acbd5a58a946c736edfdabb2ac5b34c3d08d1ba1a7b437fa0884df/numpy-2.4.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad2e2ef14e0b04e544ea2fa0a36463f847f113d314aa02e5b402fdf910ef309e", size = 15727691, upload-time = "2026-03-29T13:20:06.004Z" }, + { url = "https://files.pythonhosted.org/packages/e6/db/338535d9b152beabeb511579598418ba0212ce77cf9718edd70262cc4370/numpy-2.4.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a285b3b96f951841799528cd1f4f01cd70e7e0204b4abebac9463eecfcf2a40", size = 16681241, upload-time = "2026-03-29T13:20:09.417Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a9/ad248e8f58beb7a0219b413c9c7d8151c5d285f7f946c3e26695bdbbe2df/numpy-2.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f8474c4241bc18b750be2abea9d7a9ec84f46ef861dbacf86a4f6e043401f79e", size = 17085767, upload-time = "2026-03-29T13:20:13.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/1a/3b88ccd3694681356f70da841630e4725a7264d6a885c8d442a697e1146b/numpy-2.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4e874c976154687c1f71715b034739b45c7711bec81db01914770373d125e392", size = 18403169, upload-time = "2026-03-29T13:20:17.096Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c9/fcfd5d0639222c6eac7f304829b04892ef51c96a75d479214d77e3ce6e33/numpy-2.4.4-cp313-cp313t-win32.whl", hash = "sha256:9c585a1790d5436a5374bac930dad6ed244c046ed91b2b2a3634eb2971d21008", size = 6083477, upload-time = "2026-03-29T13:20:20.195Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e3/3938a61d1c538aaec8ed6fd6323f57b0c2d2d2219512434c5c878db76553/numpy-2.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:93e15038125dc1e5345d9b5b68aa7f996ec33b98118d18c6ca0d0b7d6198b7e8", size = 12457487, upload-time = "2026-03-29T13:20:22.946Z" }, + { url = "https://files.pythonhosted.org/packages/97/6a/7e345032cc60501721ef94e0e30b60f6b0bd601f9174ebd36389a2b86d40/numpy-2.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:0dfd3f9d3adbe2920b68b5cd3d51444e13a10792ec7154cd0a2f6e74d4ab3233", size = 10292002, upload-time = "2026-03-29T13:20:25.909Z" }, + { url = "https://files.pythonhosted.org/packages/6e/06/c54062f85f673dd5c04cbe2f14c3acb8c8b95e3384869bb8cc9bff8cb9df/numpy-2.4.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f169b9a863d34f5d11b8698ead99febeaa17a13ca044961aa8e2662a6c7766a0", size = 16684353, upload-time = "2026-03-29T13:20:29.504Z" }, + { url = "https://files.pythonhosted.org/packages/4c/39/8a320264a84404c74cc7e79715de85d6130fa07a0898f67fb5cd5bd79908/numpy-2.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2483e4584a1cb3092da4470b38866634bafb223cbcd551ee047633fd2584599a", size = 14704914, upload-time = "2026-03-29T13:20:33.547Z" }, + { url = "https://files.pythonhosted.org/packages/91/fb/287076b2614e1d1044235f50f03748f31fa287e3dbe6abeb35cdfa351eca/numpy-2.4.4-cp314-cp314-macosx_14_0_arm64.whl", hash = 
"sha256:2d19e6e2095506d1736b7d80595e0f252d76b89f5e715c35e06e937679ea7d7a", size = 5210005, upload-time = "2026-03-29T13:20:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/63/eb/fcc338595309910de6ecabfcef2419a9ce24399680bfb149421fa2df1280/numpy-2.4.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6a246d5914aa1c820c9443ddcee9c02bec3e203b0c080349533fae17727dfd1b", size = 6544974, upload-time = "2026-03-29T13:20:39.014Z" }, + { url = "https://files.pythonhosted.org/packages/44/5d/e7e9044032a716cdfaa3fba27a8e874bf1c5f1912a1ddd4ed071bf8a14a6/numpy-2.4.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:989824e9faf85f96ec9c7761cd8d29c531ad857bfa1daa930cba85baaecf1a9a", size = 15684591, upload-time = "2026-03-29T13:20:42.146Z" }, + { url = "https://files.pythonhosted.org/packages/98/7c/21252050676612625449b4807d6b695b9ce8a7c9e1c197ee6216c8a65c7c/numpy-2.4.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27a8d92cd10f1382a67d7cf4db7ce18341b66438bdd9f691d7b0e48d104c2a9d", size = 16637700, upload-time = "2026-03-29T13:20:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b1/29/56d2bbef9465db24ef25393383d761a1af4f446a1df9b8cded4fe3a5a5d7/numpy-2.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e44319a2953c738205bf3354537979eaa3998ed673395b964c1176083dd46252", size = 17035781, upload-time = "2026-03-29T13:20:50.242Z" }, + { url = "https://files.pythonhosted.org/packages/e3/2b/a35a6d7589d21f44cea7d0a98de5ddcbb3d421b2622a5c96b1edf18707c3/numpy-2.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e892aff75639bbef0d2a2cfd55535510df26ff92f63c92cd84ef8d4ba5a5557f", size = 18362959, upload-time = "2026-03-29T13:20:54.019Z" }, + { url = "https://files.pythonhosted.org/packages/64/c9/d52ec581f2390e0f5f85cbfd80fb83d965fc15e9f0e1aec2195faa142cde/numpy-2.4.4-cp314-cp314-win32.whl", hash = "sha256:1378871da56ca8943c2ba674530924bb8ca40cd228358a3b5f302ad60cf875fc", size = 6008768, 
upload-time = "2026-03-29T13:20:56.912Z" }, + { url = "https://files.pythonhosted.org/packages/fa/22/4cc31a62a6c7b74a8730e31a4274c5dc80e005751e277a2ce38e675e4923/numpy-2.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:715d1c092715954784bc79e1174fc2a90093dc4dc84ea15eb14dad8abdcdeb74", size = 12449181, upload-time = "2026-03-29T13:20:59.548Z" }, + { url = "https://files.pythonhosted.org/packages/70/2e/14cda6f4d8e396c612d1bf97f22958e92148801d7e4f110cabebdc0eef4b/numpy-2.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:2c194dd721e54ecad9ad387c1d35e63dce5c4450c6dc7dd5611283dda239aabb", size = 10496035, upload-time = "2026-03-29T13:21:02.524Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e8/8fed8c8d848d7ecea092dc3469643f9d10bc3a134a815a3b033da1d2039b/numpy-2.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2aa0613a5177c264ff5921051a5719d20095ea586ca88cc802c5c218d1c67d3e", size = 14824958, upload-time = "2026-03-29T13:21:05.671Z" }, + { url = "https://files.pythonhosted.org/packages/05/1a/d8007a5138c179c2bf33ef44503e83d70434d2642877ee8fbb230e7c0548/numpy-2.4.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:42c16925aa5a02362f986765f9ebabf20de75cdefdca827d14315c568dcab113", size = 5330020, upload-time = "2026-03-29T13:21:08.635Z" }, + { url = "https://files.pythonhosted.org/packages/99/64/ffb99ac6ae93faf117bcbd5c7ba48a7f45364a33e8e458545d3633615dda/numpy-2.4.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:874f200b2a981c647340f841730fc3a2b54c9d940566a3c4149099591e2c4c3d", size = 6650758, upload-time = "2026-03-29T13:21:10.949Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6e/795cc078b78a384052e73b2f6281ff7a700e9bf53bcce2ee579d4f6dd879/numpy-2.4.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9b39d38a9bd2ae1becd7eac1303d031c5c110ad31f2b319c6e7d98b135c934d", size = 15729948, upload-time = "2026-03-29T13:21:14.047Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/86/2acbda8cc2af5f3d7bfc791192863b9e3e19674da7b5e533fded124d1299/numpy-2.4.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b268594bccac7d7cf5844c7732e3f20c50921d94e36d7ec9b79e9857694b1b2f", size = 16679325, upload-time = "2026-03-29T13:21:17.561Z" }, + { url = "https://files.pythonhosted.org/packages/bc/59/cafd83018f4aa55e0ac6fa92aa066c0a1877b77a615ceff1711c260ffae8/numpy-2.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ac6b31e35612a26483e20750126d30d0941f949426974cace8e6b5c58a3657b0", size = 17084883, upload-time = "2026-03-29T13:21:21.106Z" }, + { url = "https://files.pythonhosted.org/packages/f0/85/a42548db84e65ece46ab2caea3d3f78b416a47af387fcbb47ec28e660dc2/numpy-2.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e3ed142f2728df44263aaf5fb1f5b0b99f4070c553a0d7f033be65338329150", size = 18403474, upload-time = "2026-03-29T13:21:24.828Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ad/483d9e262f4b831000062e5d8a45e342166ec8aaa1195264982bca267e62/numpy-2.4.4-cp314-cp314t-win32.whl", hash = "sha256:dddbbd259598d7240b18c9d87c56a9d2fb3b02fe266f49a7c101532e78c1d871", size = 6155500, upload-time = "2026-03-29T13:21:28.205Z" }, + { url = "https://files.pythonhosted.org/packages/c7/03/2fc4e14c7bd4ff2964b74ba90ecb8552540b6315f201df70f137faa5c589/numpy-2.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:a7164afb23be6e37ad90b2f10426149fd75aee07ca55653d2aa41e66c4ef697e", size = 12637755, upload-time = "2026-03-29T13:21:31.107Z" }, + { url = "https://files.pythonhosted.org/packages/58/78/548fb8e07b1a341746bfbecb32f2c268470f45fa028aacdbd10d9bc73aab/numpy-2.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:ba203255017337d39f89bdd58417f03c4426f12beed0440cfd933cb15f8669c7", size = 10566643, upload-time = "2026-03-29T13:21:34.339Z" }, ] [[package]] @@ -953,7 +1432,7 @@ wheels = [ [[package]] name = "openai" -version = "2.14.0" +version = "2.31.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -965,9 +1444,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/b1/12fe1c196bea326261718eb037307c1c1fe1dedc2d2d4de777df822e6238/openai-2.14.0.tar.gz", hash = "sha256:419357bedde9402d23bf8f2ee372fca1985a73348debba94bddff06f19459952", size = 626938, upload-time = "2025-12-19T03:28:45.742Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/fe/64b3d035780b3188f86c4f6f1bc202e7bb74757ef028802112273b9dcacf/openai-2.31.0.tar.gz", hash = "sha256:43ca59a88fc973ad1848d86b98d7fac207e265ebbd1828b5e4bdfc85f79427a5", size = 684772, upload-time = "2026-04-08T21:01:41.797Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" }, + { url = "https://files.pythonhosted.org/packages/66/bc/a8f7c3aa03452fedbb9af8be83e959adba96a6b4a35e416faffcc959c568/openai-2.31.0-py3-none-any.whl", hash = "sha256:44e1344d87e56a493d649b17e2fac519d1368cbb0745f59f1957c4c26de50a0a", size = 1153479, upload-time = "2026-04-08T21:01:39.217Z" }, ] [[package]] @@ -1094,29 +1573,29 @@ wheels = [ [[package]] name = "packaging" -version = "25.0" +version = "26.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = 
"sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] [[package]] name = "pathspec" -version = "1.0.2" +version = "1.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/b9/6eb731b52f132181a9144bbe77ff82117f6b2d2fbfba49aaab2c014c4760/pathspec-1.0.2.tar.gz", hash = "sha256:fa32b1eb775ed9ba8d599b22c5f906dc098113989da2c00bf8b210078ca7fb92", size = 130502, upload-time = "2026-01-08T04:33:27.613Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/6b/14fc9049d78435fd29e82846c777bd7ed9c470013dc8d0260fff3ff1c11e/pathspec-1.0.2-py3-none-any.whl", hash = "sha256:62f8558917908d237d399b9b338ef455a814801a4688bc41074b25feefd93472", size = 54844, upload-time = "2026-01-08T04:33:26.4Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = 
"2026-01-27T03:59:45.137Z" }, ] [[package]] name = "platformdirs" -version = "4.5.1" +version = "4.9.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = "sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/4a/0883b8e3802965322523f0b200ecf33d31f10991d0401162f4b23c698b42/platformdirs-4.9.6.tar.gz", hash = "sha256:3bfa75b0ad0db84096ae777218481852c0ebc6c727b3168c1b9e0118e458cf0a", size = 29400, upload-time = "2026-04-09T00:04:10.812Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, + { url = "https://files.pythonhosted.org/packages/75/a6/a0a304dc33b49145b21f4808d763822111e67d1c3a32b524a1baf947b6e1/platformdirs-4.9.6-py3-none-any.whl", hash = "sha256:e61adb1d5e5cb3441b4b7710bea7e4c12250ca49439228cc1021c00dcfac0917", size = 21348, upload-time = "2026-04-09T00:04:09.463Z" }, ] [[package]] @@ -1128,40 +1607,124 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = 
"sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 
199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 
197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + [[package]] name = "proto-plus" -version = "1.27.0" +version = "1.27.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/89/9cbe2f4bba860e149108b683bc2efec21f14d5f7ed6e25562ad86acbc373/proto_plus-1.27.0.tar.gz", hash = "sha256:873af56dd0d7e91836aee871e5799e1c6f1bda86ac9a983e0bb9f0c266a568c4", size = 56158, upload-time = "2025-12-16T13:46:25.729Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/81/0d/94dfe80193e79d55258345901acd2917523d56e8381bc4dee7fd38e3868a/proto_plus-1.27.2.tar.gz", hash = "sha256:b2adde53adadf75737c44d3dcb0104fde65250dfc83ad59168b4aa3e574b6a24", size = 57204, upload-time = "2026-03-26T22:18:57.174Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/24/3b7a0818484df9c28172857af32c2397b6d8fcd99d9468bd4684f98ebf0a/proto_plus-1.27.0-py3-none-any.whl", hash = "sha256:1baa7f81cf0f8acb8bc1f6d085008ba4171eaf669629d1b6d1673b21ed1c0a82", size = 50205, upload-time = "2025-12-16T13:46:24.76Z" }, + { url = "https://files.pythonhosted.org/packages/84/f3/1fba73eeffafc998a25d59703b63f8be4fe8a5cb12eaff7386a0ba0f7125/proto_plus-1.27.2-py3-none-any.whl", hash = "sha256:6432f75893d3b9e70b9c412f1d2f03f65b11fb164b793d14ae2ca01821d22718", size = 50450, upload-time = "2026-03-26T22:13:42.927Z" }, ] [[package]] name = "protobuf" -version = "6.33.2" +version = "6.33.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296, upload-time = "2025-12-06T00:17:53.311Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/70/e908e9c5e52ef7c3a6c7902c9dfbb34c7e29c25d2f81ade3856445fd5c94/protobuf-6.33.6.tar.gz", hash = "sha256:a6768d25248312c297558af96a9f9c929e8c4cee0659cb07e780731095f38135", size = 444531, upload-time = "2026-03-18T19:05:00.988Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603, upload-time = "2025-12-06T00:17:41.114Z" }, - { url = 
"https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930, upload-time = "2025-12-06T00:17:43.278Z" }, - { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621, upload-time = "2025-12-06T00:17:44.445Z" }, - { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460, upload-time = "2025-12-06T00:17:45.678Z" }, - { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168, upload-time = "2025-12-06T00:17:46.813Z" }, - { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270, upload-time = "2025-12-06T00:17:48.253Z" }, - { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501, upload-time = "2025-12-06T00:17:52.211Z" }, + { url = "https://files.pythonhosted.org/packages/fc/9f/2f509339e89cfa6f6a4c4ff50438db9ca488dec341f7e454adad60150b00/protobuf-6.33.6-cp310-abi3-win32.whl", hash = 
"sha256:7d29d9b65f8afef196f8334e80d6bc1d5d4adedb449971fefd3723824e6e77d3", size = 425739, upload-time = "2026-03-18T19:04:48.373Z" }, + { url = "https://files.pythonhosted.org/packages/76/5d/683efcd4798e0030c1bab27374fd13a89f7c2515fb1f3123efdfaa5eab57/protobuf-6.33.6-cp310-abi3-win_amd64.whl", hash = "sha256:0cd27b587afca21b7cfa59a74dcbd48a50f0a6400cfb59391340ad729d91d326", size = 437089, upload-time = "2026-03-18T19:04:50.381Z" }, + { url = "https://files.pythonhosted.org/packages/5c/01/a3c3ed5cd186f39e7880f8303cc51385a198a81469d53d0fdecf1f64d929/protobuf-6.33.6-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:9720e6961b251bde64edfdab7d500725a2af5280f3f4c87e57c0208376aa8c3a", size = 427737, upload-time = "2026-03-18T19:04:51.866Z" }, + { url = "https://files.pythonhosted.org/packages/ee/90/b3c01fdec7d2f627b3a6884243ba328c1217ed2d978def5c12dc50d328a3/protobuf-6.33.6-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e2afbae9b8e1825e3529f88d514754e094278bb95eadc0e199751cdd9a2e82a2", size = 324610, upload-time = "2026-03-18T19:04:53.096Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ca/25afc144934014700c52e05103c2421997482d561f3101ff352e1292fb81/protobuf-6.33.6-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c96c37eec15086b79762ed265d59ab204dabc53056e3443e702d2681f4b39ce3", size = 339381, upload-time = "2026-03-18T19:04:54.616Z" }, + { url = "https://files.pythonhosted.org/packages/16/92/d1e32e3e0d894fe00b15ce28ad4944ab692713f2e7f0a99787405e43533a/protobuf-6.33.6-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:e9db7e292e0ab79dd108d7f1a94fe31601ce1ee3f7b79e0692043423020b0593", size = 323436, upload-time = "2026-03-18T19:04:55.768Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/02445137af02769918a93807b2b7890047c32bfb9f90371cbc12688819eb/protobuf-6.33.6-py3-none-any.whl", hash = "sha256:77179e006c476e69bf8e8ce866640091ec42e1beb80b213c3900006ecfba6901", size = 170656, upload-time = "2026-03-18T19:04:59.826Z" }, ] [[package]] name 
= "pyasn1" -version = "0.6.2" +version = "0.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586, upload-time = "2026-01-16T18:04:18.534Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371, upload-time = "2026-01-16T18:04:17.174Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" }, ] [[package]] @@ -1178,16 +1741,16 @@ wheels = [ [[package]] name = "pycparser" -version = "2.23" +version = "3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = 
"2026-01-21T14:26:51.89Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] [[package]] name = "pydantic" -version = "2.12.5" +version = "2.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -1195,27 +1758,27 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +sdist = { url = "https://files.pythonhosted.org/packages/84/6b/69fd5c7194b21ebde0f8637e2a4ddc766ada29d472bfa6a5ca533d79549a/pydantic-2.13.0.tar.gz", hash = "sha256:b89b575b6e670ebf6e7448c01b41b244f471edd276cd0b6fe02e7e7aca320070", size = 843468, upload-time = "2026-04-13T10:51:35.571Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, + { url = "https://files.pythonhosted.org/packages/01/d7/c3a52c61f5b7be648e919005820fbac33028c6149994cd64453f49951c17/pydantic-2.13.0-py3-none-any.whl", hash = 
"sha256:ab0078b90da5f3e2fd2e71e3d9b457ddcb35d0350854fbda93b451e28d56baaf", size = 471872, upload-time = "2026-04-13T10:51:33.343Z" }, ] [[package]] name = "pydantic-ai-slim" -version = "1.40.0" +version = "1.80.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "genai-prices" }, - { name = "griffe" }, + { name = "griffelib" }, { name = "httpx" }, { name = "opentelemetry-api" }, { name = "pydantic" }, { name = "pydantic-graph" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/a4/7d4e5386c1c66c56218d4e469ae6a5131dafff79f39d7d06ee85cebae048/pydantic_ai_slim-1.40.0.tar.gz", hash = "sha256:965270975b950a13e7ff4f0dc88e33f6371f765e29c66782d6802aa85509d327", size = 369669, upload-time = "2026-01-07T01:35:39.271Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/27/aa309951a8973a8525fdf9e45b49960105bc78ebc8dba48366c2853538ae/pydantic_ai_slim-1.80.0.tar.gz", hash = "sha256:034f7f910dfce5d82528c74a717a99065ae548390f0b906165972cd13a87d2cf", size = 549153, upload-time = "2026-04-10T23:31:19.862Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/d2/b5d879328cde82683fbb69fa2fa047a4f7c7096146576eb69c5493529f65/pydantic_ai_slim-1.40.0-py3-none-any.whl", hash = "sha256:a39e8e7d6902e83fa556f5b9ed410a784b5494a649a1e6d743998e0fa600307b", size = 485749, upload-time = "2026-01-07T01:35:28.954Z" }, + { url = "https://files.pythonhosted.org/packages/23/bf/ef273265ef3530cf432fd6d0014ceed57d1cc5b1550fd975bc91152633c8/pydantic_ai_slim-1.80.0-py3-none-any.whl", hash = "sha256:160ad31f522c3d091f3ce32b478d26034c05b6c2c84798a4c8b191c7f9f94bee", size = 703157, upload-time = "2026-04-10T23:31:12Z" }, ] [package.optional-dependencies] @@ -1226,78 +1789,82 @@ openai = [ [[package]] name = "pydantic-core" -version = "2.41.5" +version = "2.46.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, - { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, - { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, - { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, - { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, - { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, - { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, - { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, - { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, - { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, - { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, - { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, - { url = 
"https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, - { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, - { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = 
"2025-11-04T13:40:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, - { url = 
"https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, - { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, - { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, - { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, - { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = 
"2025-11-04T13:41:09.827Z" }, - { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, - { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, - { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, - { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, - { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, - { url = 
"https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, - { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, - { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, - { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, - { url = 
"https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, - { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, - { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, - { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, - { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, - { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, - { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, - { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, - { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, - { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/6f/0a/9414cddf82eda3976b14048cc0fa8f5b5d1aecb0b22e1dcd2dbfe0e139b1/pydantic_core-2.46.0.tar.gz", hash = "sha256:82d2498c96be47b47e903e1378d1d0f770097ec56ea953322f39936a7cf34977", size = 471441, upload-time = "2026-04-13T09:06:33.813Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/d2/206c72ad47071559142a35f71efc29eb16448a4a5ae9487230ab8e4e292b/pydantic_core-2.46.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:66ccedb02c934622612448489824955838a221b3a35875458970521ef17b2f9c", size = 2117060, upload-time = "2026-04-13T09:04:47.443Z" }, + { url = "https://files.pythonhosted.org/packages/17/2c/7a53b33f91c8b77e696b1a6aa3bed609bf9374bdc0f8dcda681bc7d922b8/pydantic_core-2.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a44f27f4d2788ef9876ec47a43739b118c5904d74f418f53398f6ced3bbcacf2", size = 1951802, upload-time = "2026-04-13T09:05:34.591Z" }, + { url = "https://files.pythonhosted.org/packages/fc/20/90e548c1f6d38800ef11c915881525770ce270d8e5e887563ff046a08674/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26a1032bcce6ca4b4670eb3f7d8195bd0a8b8f255f1307823e217ca3cfa7c27", size = 1976621, upload-time = "2026-04-13T09:04:03.909Z" }, + { url = "https://files.pythonhosted.org/packages/20/3c/9c5810ca70b60c623488cdd80f7e9ee1a0812df81e97098b64788719860f/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b8d1412f725060527e56675904b17a2d421dddcf861eecf7c75b9dda47921a4", size = 2056721, upload-time = "2026-04-13T09:04:40.992Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/a3/d6e5f4cdec84278431c75540f90838c9d0a4dfe9402a8f3902073660ff28/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc3d1569edd859cabaa476cabce9eecd05049a7966af7b4a33b541bfd4ca1104", size = 2239634, upload-time = "2026-04-13T09:03:52.478Z" }, + { url = "https://files.pythonhosted.org/packages/46/42/ef58aacf330d8de6e309d62469aa1f80e945eaf665929b4037ac1bfcebc1/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38108976f2d8afaa8f5067fd1390a8c9f5cc580175407cda636e76bc76e88054", size = 2315739, upload-time = "2026-04-13T09:05:04.971Z" }, + { url = "https://files.pythonhosted.org/packages/8b/86/c63b12fafa2d86a515bfd1840b39c23a49302f02b653161bf9c3a0566c50/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5a06d8ed01dad5575056b5187e5959b336793c6047920a3441ee5b03533836", size = 2098169, upload-time = "2026-04-13T09:07:27.151Z" }, + { url = "https://files.pythonhosted.org/packages/76/19/b5b33a2f6be4755b21a20434293c4364be255f4c1a108f125d101d4cc4ee/pydantic_core-2.46.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:04017ace142da9ce27cafd423a480872571b5c7e80382aec22f7d715ca8eb870", size = 2170830, upload-time = "2026-04-13T09:04:39.448Z" }, + { url = "https://files.pythonhosted.org/packages/99/ae/7559f99a29b7d440012ddb4da897359304988a881efaca912fd2f655652e/pydantic_core-2.46.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2629ad992ed1b1c012e6067f5ffafd3336fcb9b54569449fabb85621f1444ed3", size = 2203901, upload-time = "2026-04-13T09:04:01.048Z" }, + { url = "https://files.pythonhosted.org/packages/dd/0e/b0ef945a39aeb4ac58da316813e1106b7fbdfbf20ac141c1c27904355ac5/pydantic_core-2.46.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3068b1e7bd986aebc88f6859f8353e72072538dcf92a7fb9cf511a0f61c5e729", size = 2191789, upload-time = "2026-04-13T09:06:39.915Z" }, + { 
url = "https://files.pythonhosted.org/packages/90/f4/830484e07188c1236b013995818888ab93bab8fd88aa9689b1d8fd22220d/pydantic_core-2.46.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:1e366916ff69ff700aa9326601634e688581bc24c5b6b4f8738d809ec7d72611", size = 2344423, upload-time = "2026-04-13T09:05:12.252Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ba/e455c18cbdc333177af754e740be4fe9d1de173d65bbe534daf88da02ac0/pydantic_core-2.46.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:485a23e8f4618a1b8e23ac744180acde283fffe617f96923d25507d5cade62ec", size = 2384037, upload-time = "2026-04-13T09:06:24.503Z" }, + { url = "https://files.pythonhosted.org/packages/78/1f/b35d20d73144a41e78de0ae398e60fdd8bed91667daa1a5a92ab958551ba/pydantic_core-2.46.0-cp312-cp312-win32.whl", hash = "sha256:520940e1b702fe3b33525d0351777f25e9924f1818ca7956447dabacf2d339fd", size = 1967068, upload-time = "2026-04-13T09:05:23.374Z" }, + { url = "https://files.pythonhosted.org/packages/d1/84/4b6252e9606e8295647b848233cc4137ee0a04ebba8f0f9fb2977655b38c/pydantic_core-2.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:90d2048e0339fa365e5a66aefe760ddd3b3d0a45501e088bc5bc7f4ed9ff9571", size = 2071008, upload-time = "2026-04-13T09:05:21.392Z" }, + { url = "https://files.pythonhosted.org/packages/39/95/d08eb508d4d5560ccbd226ee5971e5ef9b749aba9b413c0c4ed6e406d4f6/pydantic_core-2.46.0-cp312-cp312-win_arm64.whl", hash = "sha256:a70247649b7dffe36648e8f34be5ce8c5fa0a27ff07b071ea780c20a738c05ce", size = 2036634, upload-time = "2026-04-13T09:05:48.299Z" }, + { url = "https://files.pythonhosted.org/packages/df/05/ab3b0742bad1d51822f1af0c4232208408902bdcfc47601f3b812e09e6c2/pydantic_core-2.46.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a05900c37264c070c683c650cbca8f83d7cbb549719e645fcd81a24592eac788", size = 2116814, upload-time = "2026-04-13T09:04:12.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/08/30b43d9569d69094a0899a199711c43aa58fce6ce80f6a8f7693673eb995/pydantic_core-2.46.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8de8e482fd4f1e3f36c50c6aac46d044462615d8f12cfafc6bebeaa0909eea22", size = 1951867, upload-time = "2026-04-13T09:04:02.364Z" }, + { url = "https://files.pythonhosted.org/packages/db/a0/bf9a1ba34537c2ed3872a48195291138fdec8fe26c4009776f00d63cf0c8/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c525ecf8a4cdf198327b65030a7d081867ad8e60acb01a7214fff95cf9832d47", size = 1977040, upload-time = "2026-04-13T09:06:16.088Z" }, + { url = "https://files.pythonhosted.org/packages/71/70/0ba03c20e1e118219fc18c5417b008b7e880f0e3fb38560ec4465984d471/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f14581aeb12e61542ce73b9bfef2bca5439d65d9ab3efe1a4d8e346b61838f9b", size = 2055284, upload-time = "2026-04-13T09:05:25.125Z" }, + { url = "https://files.pythonhosted.org/packages/58/cf/1e320acefbde7fb7158a9e5def55e0adf9a4634636098ce28dc6b978e0d3/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c108067f2f7e190d0dbd81247d789ec41f9ea50ccd9265a3a46710796ac60530", size = 2238896, upload-time = "2026-04-13T09:05:01.345Z" }, + { url = "https://files.pythonhosted.org/packages/df/f5/ea8ba209756abe9eba891bb0ef3772b4c59a894eb9ad86cd5bd0dd4e3e52/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ac10967e9a7bb1b96697374513f9a1a90a59e2fb41566b5e00ee45392beac59", size = 2314353, upload-time = "2026-04-13T09:06:07.942Z" }, + { url = "https://files.pythonhosted.org/packages/e8/f8/5885350203b72e96438eee7f94de0d8f0442f4627237ca8ef75de34db1cd/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7897078fe8a13b73623c0955dfb2b3d2c9acb7177aac25144758c9e5a5265aaa", size = 2098522, upload-time = 
"2026-04-13T09:04:23.239Z" }, + { url = "https://files.pythonhosted.org/packages/bf/88/5930b0e828e371db5a556dd3189565417ddc3d8316bb001058168aadcf5f/pydantic_core-2.46.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:e69ce405510a419a082a78faed65bb4249cfb51232293cc675645c12f7379bf7", size = 2168757, upload-time = "2026-04-13T09:07:12.46Z" }, + { url = "https://files.pythonhosted.org/packages/da/75/63d563d3035a0548e721c38b5b69fd5626fdd51da0f09ff4467503915b82/pydantic_core-2.46.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd28d13eea0d8cf351dc1fe274b5070cc8e1cca2644381dee5f99de629e77cf3", size = 2202518, upload-time = "2026-04-13T09:05:44.418Z" }, + { url = "https://files.pythonhosted.org/packages/a7/53/1958eacbfddc41aadf5ae86dd85041bf054b675f34a2fa76385935f96070/pydantic_core-2.46.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ee1547a6b8243e73dd10f585555e5a263395e55ce6dea618a078570a1e889aef", size = 2190148, upload-time = "2026-04-13T09:06:56.151Z" }, + { url = "https://files.pythonhosted.org/packages/c7/17/098cc6d3595e4623186f2bc6604a6195eb182e126702a90517236391e9ce/pydantic_core-2.46.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:c3dc68dcf62db22a18ddfc3ad4960038f72b75908edc48ae014d7ac8b391d57a", size = 2342925, upload-time = "2026-04-13T09:04:17.286Z" }, + { url = "https://files.pythonhosted.org/packages/71/a7/abdb924620b1ac535c690b36ad5b8871f376104090f8842c08625cecf1d3/pydantic_core-2.46.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:004a2081c881abfcc6854a4623da6a09090a0d7c1398a6ae7133ca1256cee70b", size = 2383167, upload-time = "2026-04-13T09:04:52.643Z" }, + { url = "https://files.pythonhosted.org/packages/d7/c9/2ddd10f50e4b7350d2574629a0f53d8d4eb6573f9c19a6b43e6b1487a31d/pydantic_core-2.46.0-cp313-cp313-win32.whl", hash = "sha256:59d24ec8d5eaabad93097525a69d0f00f2667cb353eb6cda578b1cfff203ceef", size = 1965660, upload-time = "2026-04-13T09:06:05.877Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/e7/1efc38ed6f2680c032bcefa0e3ebd496a8c77e92dfdb86b07d0f2fc632b1/pydantic_core-2.46.0-cp313-cp313-win_amd64.whl", hash = "sha256:71186dad5ac325c64d68fe0e654e15fd79802e7cc42bc6f0ff822d5ad8b1ab25", size = 2069563, upload-time = "2026-04-13T09:07:14.738Z" }, + { url = "https://files.pythonhosted.org/packages/c3/1e/a325b4989e742bf7e72ed35fa124bc611fd76539c9f8cd2a9a7854473533/pydantic_core-2.46.0-cp313-cp313-win_arm64.whl", hash = "sha256:8e4503f3213f723842c9a3b53955c88a9cfbd0b288cbd1c1ae933aebeec4a1b4", size = 2034966, upload-time = "2026-04-13T09:04:21.629Z" }, + { url = "https://files.pythonhosted.org/packages/36/3b/914891d384cdbf9a6f464eb13713baa22ea1e453d4da80fb7da522079370/pydantic_core-2.46.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:4fc801c290342350ffc82d77872054a934b2e24163727263362170c1db5416ca", size = 2113349, upload-time = "2026-04-13T09:04:59.407Z" }, + { url = "https://files.pythonhosted.org/packages/35/95/3a0c6f65e231709fb3463e32943c69d10285cb50203a2130a4732053a06d/pydantic_core-2.46.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0a36f2cc88170cc177930afcc633a8c15907ea68b59ac16bd180c2999d714940", size = 1949170, upload-time = "2026-04-13T09:06:09.935Z" }, + { url = "https://files.pythonhosted.org/packages/d1/63/d845c36a608469fe7bee226edeff0984c33dbfe7aecd755b0e7ab5a275c4/pydantic_core-2.46.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a3912e0c568a1f99d4d6d3e41def40179d61424c0ca1c8c87c4877d7f6fd7fb", size = 1977914, upload-time = "2026-04-13T09:04:56.16Z" }, + { url = "https://files.pythonhosted.org/packages/08/6f/f2e7a7f85931fb31671f5378d1c7fc70606e4b36d59b1b48e1bd1ef5d916/pydantic_core-2.46.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3534c3415ed1a19ab23096b628916a827f7858ec8db49ad5d7d1e44dc13c0d7b", size = 2050538, upload-time = "2026-04-13T09:05:06.789Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/97/f4aa7181dd9a16dd9059a99fc48fdab0c2aab68307283a5c04cf56de68c4/pydantic_core-2.46.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21067396fc285609323a4db2f63a87570044abe0acddfcca8b135fc7948e3db7", size = 2236294, upload-time = "2026-04-13T09:07:03.2Z" }, + { url = "https://files.pythonhosted.org/packages/24/c1/6a5042fc32765c87101b500f394702890af04239c318b6002cfd627b710d/pydantic_core-2.46.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2afd85b7be186e2fe7cdbb09a3d964bcc2042f65bbcc64ad800b3c7915032655", size = 2312954, upload-time = "2026-04-13T09:06:11.919Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e4/566101a561492ce8454f0844ca29c3b675a6b3a7b3ff577db85ed05c8c50/pydantic_core-2.46.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67e2c2e171b78db8154da602de72ffdc473c6ee51de8a9d80c0f1cd4051abfc7", size = 2102533, upload-time = "2026-04-13T09:06:58.664Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ac/adc11ee1646a5c4dd9abb09a00e7909e6dc25beddc0b1310ca734bb9b48e/pydantic_core-2.46.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:c16ae1f3170267b1a37e16dba5c297bdf60c8b5657b147909ca8774ce7366644", size = 2169447, upload-time = "2026-04-13T09:04:11.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/73/408e686b45b82d28ac19e8229e07282254dbee6a5d24c5c7cf3cf3716613/pydantic_core-2.46.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:133b69e1c1ba34d3702eed73f19f7f966928f9aa16663b55c2ebce0893cca42e", size = 2200672, upload-time = "2026-04-13T09:03:54.056Z" }, + { url = "https://files.pythonhosted.org/packages/0a/3b/807d5b035ec891b57b9079ce881f48263936c37bd0d154a056e7fd152afb/pydantic_core-2.46.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:15ed8e5bde505133d96b41702f31f06829c46b05488211a5b1c7877e11de5eb5", size = 2188293, upload-time = "2026-04-13T09:07:07.614Z" }, + { url 
= "https://files.pythonhosted.org/packages/f1/ed/719b307516285099d1196c52769fdbe676fd677da007b9c349ae70b7226d/pydantic_core-2.46.0-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:8cfc29a1c66a7f0fcb36262e92f353dd0b9c4061d558fceb022e698a801cb8ae", size = 2335023, upload-time = "2026-04-13T09:04:05.176Z" }, + { url = "https://files.pythonhosted.org/packages/8d/90/8718e4ae98c4e8a7325afdc079be82be1e131d7a47cb6c098844a9531ffe/pydantic_core-2.46.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e1155708540f13845bf68d5ac511a55c76cfe2e057ed12b4bf3adac1581fc5c2", size = 2377155, upload-time = "2026-04-13T09:06:18.081Z" }, + { url = "https://files.pythonhosted.org/packages/dd/dc/7172789283b963f81da2fc92b186e22de55687019079f71c4d570822502b/pydantic_core-2.46.0-cp314-cp314-win32.whl", hash = "sha256:de5635a48df6b2eef161d10ea1bc2626153197333662ba4cd700ee7ec1aba7f5", size = 1963078, upload-time = "2026-04-13T09:05:30.615Z" }, + { url = "https://files.pythonhosted.org/packages/e0/69/03a7ea4b6264def3a44eabf577528bcec2f49468c5698b2044dea54dc07e/pydantic_core-2.46.0-cp314-cp314-win_amd64.whl", hash = "sha256:f07a5af60c5e7cf53dd1ff734228bd72d0dc9938e64a75b5bb308ca350d9681e", size = 2068439, upload-time = "2026-04-13T09:04:57.729Z" }, + { url = "https://files.pythonhosted.org/packages/f5/eb/1c3afcfdee2ab6634b802ab0a0f1966df4c8b630028ec56a1cb0a710dc58/pydantic_core-2.46.0-cp314-cp314-win_arm64.whl", hash = "sha256:e7a77eca3c7d5108ff509db20aae6f80d47c7ed7516d8b96c387aacc42f3ce0f", size = 2026470, upload-time = "2026-04-13T09:05:08.654Z" }, + { url = "https://files.pythonhosted.org/packages/5c/30/1177dde61b200785c4739665e3aa03a9d4b2c25d2d0408b07d585e633965/pydantic_core-2.46.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:5e7cdd4398bee1aaeafe049ac366b0f887451d9ae418fd8785219c13fea2f928", size = 2107447, upload-time = "2026-04-13T09:05:46.314Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/60/4e0f61f99bdabbbc309d364a2791e1ba31e778a4935bc43391a7bdec0744/pydantic_core-2.46.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5c2c92d82808e27cef3f7ab3ed63d657d0c755e0dbe5b8a58342e37bdf09bd2e", size = 1926927, upload-time = "2026-04-13T09:06:20.371Z" }, + { url = "https://files.pythonhosted.org/packages/1d/d0/67f89a8269152c1d6eaa81f04e75a507372ebd8ca7382855a065222caa80/pydantic_core-2.46.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bab80af91cd7014b45d1089303b5f844a9d91d7da60eabf3d5f9694b32a6655", size = 1966613, upload-time = "2026-04-13T09:07:05.389Z" }, + { url = "https://files.pythonhosted.org/packages/cd/07/8dfdc3edc78f29a80fb31f366c50203ec904cff6a4c923599bf50ac0d0ff/pydantic_core-2.46.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e49ffdb714bc990f00b39d1ad1d683033875b5af15582f60c1f34ad3eeccfaa", size = 2032902, upload-time = "2026-04-13T09:06:42.47Z" }, + { url = "https://files.pythonhosted.org/packages/b0/2a/111c5e8fe24f99c46bcad7d3a82a8f6dbc738066e2c72c04c71f827d8c78/pydantic_core-2.46.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca877240e8dbdeef3a66f751dc41e5a74893767d510c22a22fc5c0199844f0ce", size = 2244456, upload-time = "2026-04-13T09:05:36.484Z" }, + { url = "https://files.pythonhosted.org/packages/6b/7c/cfc5d11c15a63ece26e148572c77cfbb2c7f08d315a7b63ef0fe0711d753/pydantic_core-2.46.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87e6843f89ecd2f596d7294e33196c61343186255b9880c4f1b725fde8b0e20d", size = 2294535, upload-time = "2026-04-13T09:06:01.689Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2c/f0d744e3dab7bd026a3f4670a97a295157cff923a2666d30a15a70a7e3d0/pydantic_core-2.46.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e20bc5add1dd9bc3b9a3600d40632e679376569098345500799a6ad7c5d46c72", size = 2104621, upload-time = 
"2026-04-13T09:04:34.388Z" }, + { url = "https://files.pythonhosted.org/packages/a7/64/e7cc4698dc024264d214b51d5a47a2404221b12060dd537d76f831b2120a/pydantic_core-2.46.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:ee6ff79a5f0289d64a9d6696a3ce1f98f925b803dd538335a118231e26d6d827", size = 2130718, upload-time = "2026-04-13T09:04:26.23Z" }, + { url = "https://files.pythonhosted.org/packages/0b/a8/224e655fec21f7d4441438ad2ecaccb33b5a3876ce7bb2098c74a49efc14/pydantic_core-2.46.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:52d35cfb58c26323101c7065508d7bb69bb56338cda9ea47a7b32be581af055d", size = 2180738, upload-time = "2026-04-13T09:05:50.253Z" }, + { url = "https://files.pythonhosted.org/packages/32/7b/b3025618ed4c4e4cbaa9882731c19625db6669896b621760ea95bc1125ef/pydantic_core-2.46.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d14cc5a6f260fa78e124061eebc5769af6534fc837e9a62a47f09a2c341fa4ea", size = 2171222, upload-time = "2026-04-13T09:07:29.929Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e3/68170aa1d891920af09c1f2f34df61dc5ff3a746400027155523e3400e89/pydantic_core-2.46.0-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:4f7ff859d663b6635f6307a10803d07f0d09487e16c3d36b1744af51dbf948b2", size = 2320040, upload-time = "2026-04-13T09:06:35.732Z" }, + { url = "https://files.pythonhosted.org/packages/67/1b/5e65807001b84972476300c1f49aea2b4971b7e9fffb5c2654877dadd274/pydantic_core-2.46.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:8ef749be6ed0d69dba31902aaa8255a9bb269ae50c93888c4df242d8bb7acd9e", size = 2377062, upload-time = "2026-04-13T09:07:39.945Z" }, + { url = "https://files.pythonhosted.org/packages/75/03/48caa9dd5f28f7662bd52bff454d9a451f6b7e5e4af95e289e5e170749c9/pydantic_core-2.46.0-cp314-cp314t-win32.whl", hash = "sha256:d93ca72870133f86360e4bb0c78cd4e6ba2a0f9f3738a6486909ffc031463b32", size = 1951028, upload-time = "2026-04-13T09:04:20.224Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/ed/e97ff55fe28c0e6e3cba641d622b15e071370b70e5f07c496b07b65db7c9/pydantic_core-2.46.0-cp314-cp314t-win_amd64.whl", hash = "sha256:6ebb2668afd657e2127cb40f2ceb627dd78e74e9dfde14d9bf6cdd532a29ff59", size = 2048519, upload-time = "2026-04-13T09:05:10.464Z" }, + { url = "https://files.pythonhosted.org/packages/b6/51/e0db8267a287994546925f252e329eeae4121b1e77e76353418da5a3adf0/pydantic_core-2.46.0-cp314-cp314t-win_arm64.whl", hash = "sha256:4864f5bbb7993845baf9209bae1669a8a76769296a018cb569ebda9dcb4241f5", size = 2026791, upload-time = "2026-04-13T09:04:37.724Z" }, + { url = "https://files.pythonhosted.org/packages/74/0c/106ed5cc50393d90523f09adcc50d05e42e748eb107dc06aea971137f02d/pydantic_core-2.46.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:bc0e2fefe384152d7da85b5c2fe8ce2bf24752f68a58e3f3ea42e28a29dfdeb2", size = 2104968, upload-time = "2026-04-13T09:06:26.967Z" }, + { url = "https://files.pythonhosted.org/packages/f5/71/b494cef3165e3413ee9bbbb5a9eedc9af0ea7b88d8638beef6c2061b110e/pydantic_core-2.46.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:a2ab0e785548be1b4362a62c4004f9217598b7ee465f1f420fc2123e2a5b5b02", size = 1940442, upload-time = "2026-04-13T09:06:29.332Z" }, + { url = "https://files.pythonhosted.org/packages/7e/3e/a4d578c8216c443e26a1124f8c1e07c0654264ce5651143d3883d85ff140/pydantic_core-2.46.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d45aecb18b8cba1c68eeb17c2bb2d38627ceed04c5b30b882fc9134e01f187", size = 1999672, upload-time = "2026-04-13T09:04:42.798Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c1/9114560468685525a21770138382fd0cb849aaf351ff2c7b97f760d121e0/pydantic_core-2.46.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5078f6c377b002428e984259ac327ef8902aacae6c14b7de740dd4869a491501", size = 2154533, upload-time = 
"2026-04-13T09:04:50.868Z" }, ] [[package]] name = "pydantic-graph" -version = "1.40.0" +version = "1.80.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -1305,41 +1872,41 @@ dependencies = [ { name = "pydantic" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/21/fd/76ca11048717e11ccd3306d86d294e19cc606b6ca2f11bb65f724c993424/pydantic_graph-1.40.0.tar.gz", hash = "sha256:590f57640143507601cf25652de7ce13fff2354d874edae479226a8054d65ea4", size = 58453, upload-time = "2026-01-07T01:35:41.492Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/7b/03a8791e4916cb0f841fcd77ef6b6bf504419bf03d1c16e4ef80bfd553ad/pydantic_graph-1.80.0.tar.gz", hash = "sha256:94b8c2dd20730ce3cd0fa544ca9c31011a7bb0c5b9f5ca1dade6a6bed7719e8c", size = 59243, upload-time = "2026-04-10T23:31:22.504Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/24/a1cf828d257177c693460538220357df221d4e0ab00d99f4187f2f845569/pydantic_graph-1.40.0-py3-none-any.whl", hash = "sha256:6a308bb980cb3bb891f2ca412909f1e1e88d3f72934d98378f778615a5228cc8", size = 72325, upload-time = "2026-01-07T01:35:33.876Z" }, + { url = "https://files.pythonhosted.org/packages/38/12/483b7402d302021ff8537a746eebf018f4e0bb5892c7bef769ab968e03c1/pydantic_graph-1.80.0-py3-none-any.whl", hash = "sha256:60315c2042597d0377689ad48e9439760ec75d4ccda78830d2890ce9c94c6d84", size = 73065, upload-time = "2026-04-10T23:31:15.382Z" }, ] [[package]] name = "pydantic-settings" -version = "2.12.0" +version = "2.13.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = 
"2025-11-10T14:25:47.013Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, + { url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" }, ] [[package]] name = "pygments" -version = "2.19.2" +version = "2.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, ] [[package]] name = "pyjwt" -version = "2.10.1" +version = "2.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c2/27/a3b6e5bf6ff856d2509292e95c8f57f0df7017cf5394921fc4e4ef40308a/pyjwt-2.12.1.tar.gz", hash = "sha256:c74a7a2adf861c04d002db713dd85f84beb242228e671280bf709d765b03672b", size = 102564, upload-time = "2026-03-13T19:27:37.25Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, + { url = "https://files.pythonhosted.org/packages/e5/7a/8dd906bd22e79e47397a61742927f6747fe93242ef86645ee9092e610244/pyjwt-2.12.1-py3-none-any.whl", hash = "sha256:28ca37c070cad8ba8cd9790cd940535d40274d22f80ab87f3ac6a713e6e8454c", size = 29726, upload-time = "2026-03-13T19:27:35.677Z" }, ] [package.optional-dependencies] @@ -1349,11 +1916,11 @@ crypto = [ [[package]] name = "pyparsing" -version = "3.3.1" +version = "3.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/33/c1/1d9de9aeaa1b89b0186e5fe23294ff6517fce1bc69149185577cd31016b2/pyparsing-3.3.1.tar.gz", hash = "sha256:47fad0f17ac1e2cad3de3b458570fbc9b03560aa029ed5e16ee5554da9a2251c", size = 
1550512, upload-time = "2025-12-23T03:14:04.391Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/91/9c6ee907786a473bf81c5f53cf703ba0957b23ab84c264080fb5a450416f/pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc", size = 6851574, upload-time = "2026-01-21T03:57:59.36Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/40/2614036cdd416452f5bf98ec037f38a1afb17f327cb8e6b652d4729e0af8/pyparsing-3.3.1-py3-none-any.whl", hash = "sha256:023b5e7e5520ad96642e2c6db4cb683d3970bd640cdf7115049a6e9c3682df82", size = 121793, upload-time = "2025-12-23T03:14:02.103Z" }, + { url = "https://files.pythonhosted.org/packages/10/bd/c038d7cc38edc1aa5bf91ab8068b63d4308c66c4c8bb3cbba7dfbc049f9c/pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d", size = 122781, upload-time = "2026-01-21T03:57:55.912Z" }, ] [[package]] @@ -1367,20 +1934,20 @@ wheels = [ [[package]] name = "pyright" -version = "1.1.408" +version = "1.1.409" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" } +sdist = { url = "https://files.pythonhosted.org/packages/51/4e/3aa27f74211522dba7e9cbc3e74de779c6d4b654c54e50a4840623be8014/pyright-1.1.409.tar.gz", hash = "sha256:986ee05beca9e077c165758ad123667c679e050059a2546aa02473930394bc93", size = 4430434, upload-time = "2026-04-23T11:02:03.799Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = 
"sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" }, + { url = "https://files.pythonhosted.org/packages/16/6b/330d8ebae582b30c2959a1ef4c3bc344ebde48c2ff0c3f113c4710735e11/pyright-1.1.409-py3-none-any.whl", hash = "sha256:aa3ea228cab90c845c7a60d28db7a844c04315356392aa09fafcee98c8c22fb3", size = 6438161, upload-time = "2026-04-23T11:02:01.309Z" }, ] [[package]] name = "pytest" -version = "9.0.2" +version = "9.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -1389,9 +1956,9 @@ dependencies = [ { name = "pluggy" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = "2026-04-07T17:16:18.027Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, ] [[package]] @@ -1421,29 +1988,49 @@ wheels = [ [[package]] name = "python-dotenv" -version = "1.2.1" +version = "1.2.2" 
source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, ] [[package]] name = "python-multipart" -version = "0.0.22" +version = "0.0.26" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/71/b145a380824a960ebd60e1014256dbb7d2253f2316ff2d73dfd8928ec2c3/python_multipart-0.0.26.tar.gz", hash = "sha256:08fadc45918cd615e26846437f50c5d6d23304da32c341f289a617127b081f17", size = 43501, upload-time = "2026-04-10T14:09:59.473Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, + { url = "https://files.pythonhosted.org/packages/9a/22/f1925cdda983ab66fc8ec6ec8014b959262747e58bdca26a4e3d1da29d56/python_multipart-0.0.26-py3-none-any.whl", hash = "sha256:c0b169f8c4484c13b0dcf2ef0ec3a4adb255c4b7d18d8e420477d2b1dd03f185", size = 28847, upload-time = "2026-04-10T14:09:58.131Z" }, ] [[package]] name = "pytokens" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4e/8d/a762be14dae1c3bf280202ba3172020b2b0b4c537f94427435f19c413b72/pytokens-0.3.0.tar.gz", hash = "sha256:2f932b14ed08de5fcf0b391ace2642f858f1394c0857202959000b68ed7a458a", size = 17644, upload-time = "2025-11-05T13:36:35.34Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/25/d9db8be44e205a124f6c98bc0324b2bb149b7431c53877fc6d1038dddaf5/pytokens-0.3.0-py3-none-any.whl", hash = "sha256:95b2b5eaf832e469d141a378872480ede3f251a5a5041b8ec6e581d3ac71bbf3", size = 12195, upload-time = "2025-11-05T13:36:33.183Z" }, +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size = 269779, upload-time = "2026-01-30T01:03:09.756Z" }, + { url = "https://files.pythonhosted.org/packages/20/01/7436e9ad693cebda0551203e0bf28f7669976c60ad07d6402098208476de/pytokens-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ad948d085ed6c16413eb5fec6b3e02fa00dc29a2534f088d3302c47eb59adf9", size = 268076, upload-time = "2026-01-30T01:03:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/2e/df/533c82a3c752ba13ae7ef238b7f8cdd272cf1475f03c63ac6cf3fcfb00b6/pytokens-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:3f901fe783e06e48e8cbdc82d631fca8f118333798193e026a50ce1b3757ea68", size = 103552, upload-time = "2026-01-30T01:03:12.066Z" }, + { url = "https://files.pythonhosted.org/packages/cb/dc/08b1a080372afda3cceb4f3c0a7ba2bde9d6a5241f1edb02a22a019ee147/pytokens-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8bdb9d0ce90cbf99c525e75a2fa415144fd570a1ba987380190e8b786bc6ef9b", size = 160720, upload-time = "2026-01-30T01:03:13.843Z" }, + { url = "https://files.pythonhosted.org/packages/64/0c/41ea22205da480837a700e395507e6a24425151dfb7ead73343d6e2d7ffe/pytokens-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5502408cab1cb18e128570f8d598981c68a50d0cbd7c61312a90507cd3a1276f", size = 254204, upload-time = "2026-01-30T01:03:14.886Z" }, + { url 
= "https://files.pythonhosted.org/packages/e0/d2/afe5c7f8607018beb99971489dbb846508f1b8f351fcefc225fcf4b2adc0/pytokens-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29d1d8fb1030af4d231789959f21821ab6325e463f0503a61d204343c9b355d1", size = 268423, upload-time = "2026-01-30T01:03:15.936Z" }, + { url = "https://files.pythonhosted.org/packages/68/d4/00ffdbd370410c04e9591da9220a68dc1693ef7499173eb3e30d06e05ed1/pytokens-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:970b08dd6b86058b6dc07efe9e98414f5102974716232d10f32ff39701e841c4", size = 266859, upload-time = "2026-01-30T01:03:17.458Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c9/c3161313b4ca0c601eeefabd3d3b576edaa9afdefd32da97210700e47652/pytokens-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:9bd7d7f544d362576be74f9d5901a22f317efc20046efe2034dced238cbbfe78", size = 103520, upload-time = "2026-01-30T01:03:18.652Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a7/b470f672e6fc5fee0a01d9e75005a0e617e162381974213a945fcd274843/pytokens-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a14d5f5fc78ce85e426aa159489e2d5961acf0e47575e08f35584009178e321", size = 160821, upload-time = "2026-01-30T01:03:19.684Z" }, + { url = "https://files.pythonhosted.org/packages/80/98/e83a36fe8d170c911f864bfded690d2542bfcfacb9c649d11a9e6eb9dc41/pytokens-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f50fd18543be72da51dd505e2ed20d2228c74e0464e4262e4899797803d7fa", size = 254263, upload-time = "2026-01-30T01:03:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/0f/95/70d7041273890f9f97a24234c00b746e8da86df462620194cef1d411ddeb/pytokens-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc74c035f9bfca0255c1af77ddd2d6ae8419012805453e4b0e7513e17904545d", size = 268071, upload-time = "2026-01-30T01:03:21.888Z" }, + { url 
= "https://files.pythonhosted.org/packages/da/79/76e6d09ae19c99404656d7db9c35dfd20f2086f3eb6ecb496b5b31163bad/pytokens-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f66a6bbe741bd431f6d741e617e0f39ec7257ca1f89089593479347cc4d13324", size = 271716, upload-time = "2026-01-30T01:03:23.633Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/482e55fa1602e0a7ff012661d8c946bafdc05e480ea5a32f4f7e336d4aa9/pytokens-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:b35d7e5ad269804f6697727702da3c517bb8a5228afa450ab0fa787732055fc9", size = 104539, upload-time = "2026-01-30T01:03:24.788Z" }, + { url = "https://files.pythonhosted.org/packages/30/e8/20e7db907c23f3d63b0be3b8a4fd1927f6da2395f5bcc7f72242bb963dfe/pytokens-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8fcb9ba3709ff77e77f1c7022ff11d13553f3c30299a9fe246a166903e9091eb", size = 168474, upload-time = "2026-01-30T01:03:26.428Z" }, + { url = "https://files.pythonhosted.org/packages/d6/81/88a95ee9fafdd8f5f3452107748fd04c24930d500b9aba9738f3ade642cc/pytokens-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79fc6b8699564e1f9b521582c35435f1bd32dd06822322ec44afdeba666d8cb3", size = 290473, upload-time = "2026-01-30T01:03:27.415Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/3aa899645e29b6375b4aed9f8d21df219e7c958c4c186b465e42ee0a06bf/pytokens-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d31b97b3de0f61571a124a00ffe9a81fb9939146c122c11060725bd5aea79975", size = 303485, upload-time = "2026-01-30T01:03:28.558Z" }, + { url = "https://files.pythonhosted.org/packages/52/a0/07907b6ff512674d9b201859f7d212298c44933633c946703a20c25e9d81/pytokens-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:967cf6e3fd4adf7de8fc73cd3043754ae79c36475c1c11d514fc72cf5490094a", size = 306698, upload-time = "2026-01-30T01:03:29.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/2a/cbbf9250020a4a8dd53ba83a46c097b69e5eb49dd14e708f496f548c6612/pytokens-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:584c80c24b078eec1e227079d56dc22ff755e0ba8654d8383b2c549107528918", size = 116287, upload-time = "2026-01-30T01:03:30.912Z" }, + { url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" }, ] [[package]] @@ -1478,85 +2065,95 @@ wheels = [ [[package]] name = "regex" -version = "2025.11.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" }, - { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" }, - { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" }, - { url = 
"https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" }, - { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" }, - { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842, upload-time = "2025-11-03T21:31:45.353Z" }, - { url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", 
size = 858519, upload-time = "2025-11-03T21:31:46.814Z" }, - { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" }, - { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" }, - { url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" }, - { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" }, - { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time = "2025-11-03T21:31:54.514Z" }, - { url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" }, - { url = 
"https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" }, - { url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" }, - { url = "https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" }, - { url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" }, - { url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = "2025-11-03T21:32:06.148Z" }, - { url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 
803497, upload-time = "2025-11-03T21:32:08.162Z" }, - { url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" }, - { url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", size = 858462, upload-time = "2025-11-03T21:32:11.769Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = "2025-11-03T21:32:13.906Z" }, - { url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" }, - { url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" }, - { url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" }, - { url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" }, - { url = "https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" }, - { url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" }, - { url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" }, - { url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size = 812708, upload-time = "2025-11-03T21:32:34.305Z" }, - { url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472, upload-time = "2025-11-03T21:32:36.364Z" }, - { url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" }, - { url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = "2025-11-03T21:32:40.079Z" }, - { url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" }, - { url = 
"https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" }, - { url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" }, - { url = "https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, upload-time = "2025-11-03T21:32:48.026Z" }, - { url = "https://files.pythonhosted.org/packages/31/e9/f6e13de7e0983837f7b6d238ad9458800a874bf37c264f7923e63409944c/regex-2025.11.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9697a52e57576c83139d7c6f213d64485d3df5bf84807c35fa409e6c970801c6", size = 489089, upload-time = "2025-11-03T21:32:50.027Z" }, - { url = "https://files.pythonhosted.org/packages/a3/5c/261f4a262f1fa65141c1b74b255988bd2fa020cc599e53b080667d591cfc/regex-2025.11.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e18bc3f73bd41243c9b38a6d9f2366cd0e0137a9aebe2d8ff76c5b67d4c0a3f4", size = 291059, upload-time = "2025-11-03T21:32:51.682Z" }, - { url = "https://files.pythonhosted.org/packages/8e/57/f14eeb7f072b0e9a5a090d1712741fd8f214ec193dba773cf5410108bb7d/regex-2025.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:61a08bcb0ec14ff4e0ed2044aad948d0659604f824cbd50b55e30b0ec6f09c73", size = 288900, upload-time = "2025-11-03T21:32:53.569Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/6b/1d650c45e99a9b327586739d926a1cd4e94666b1bd4af90428b36af66dc7/regex-2025.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9c30003b9347c24bcc210958c5d167b9e4f9be786cb380a7d32f14f9b84674f", size = 799010, upload-time = "2025-11-03T21:32:55.222Z" }, - { url = "https://files.pythonhosted.org/packages/99/ee/d66dcbc6b628ce4e3f7f0cbbb84603aa2fc0ffc878babc857726b8aab2e9/regex-2025.11.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4e1e592789704459900728d88d41a46fe3969b82ab62945560a31732ffc19a6d", size = 864893, upload-time = "2025-11-03T21:32:57.239Z" }, - { url = "https://files.pythonhosted.org/packages/bf/2d/f238229f1caba7ac87a6c4153d79947fb0261415827ae0f77c304260c7d3/regex-2025.11.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6538241f45eb5a25aa575dbba1069ad786f68a4f2773a29a2bd3dd1f9de787be", size = 911522, upload-time = "2025-11-03T21:32:59.274Z" }, - { url = "https://files.pythonhosted.org/packages/bd/3d/22a4eaba214a917c80e04f6025d26143690f0419511e0116508e24b11c9b/regex-2025.11.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce22519c989bb72a7e6b36a199384c53db7722fe669ba891da75907fe3587db", size = 803272, upload-time = "2025-11-03T21:33:01.393Z" }, - { url = "https://files.pythonhosted.org/packages/84/b1/03188f634a409353a84b5ef49754b97dbcc0c0f6fd6c8ede505a8960a0a4/regex-2025.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:66d559b21d3640203ab9075797a55165d79017520685fb407b9234d72ab63c62", size = 787958, upload-time = "2025-11-03T21:33:03.379Z" }, - { url = "https://files.pythonhosted.org/packages/99/6a/27d072f7fbf6fadd59c64d210305e1ff865cc3b78b526fd147db768c553b/regex-2025.11.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:669dcfb2e38f9e8c69507bace46f4889e3abbfd9b0c29719202883c0a603598f", 
size = 859289, upload-time = "2025-11-03T21:33:05.374Z" }, - { url = "https://files.pythonhosted.org/packages/9a/70/1b3878f648e0b6abe023172dacb02157e685564853cc363d9961bcccde4e/regex-2025.11.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:32f74f35ff0f25a5021373ac61442edcb150731fbaa28286bbc8bb1582c89d02", size = 850026, upload-time = "2025-11-03T21:33:07.131Z" }, - { url = "https://files.pythonhosted.org/packages/dd/d5/68e25559b526b8baab8e66839304ede68ff6727237a47727d240006bd0ff/regex-2025.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e6c7a21dffba883234baefe91bc3388e629779582038f75d2a5be918e250f0ed", size = 789499, upload-time = "2025-11-03T21:33:09.141Z" }, - { url = "https://files.pythonhosted.org/packages/fc/df/43971264857140a350910d4e33df725e8c94dd9dee8d2e4729fa0d63d49e/regex-2025.11.3-cp314-cp314-win32.whl", hash = "sha256:795ea137b1d809eb6836b43748b12634291c0ed55ad50a7d72d21edf1cd565c4", size = 271604, upload-time = "2025-11-03T21:33:10.9Z" }, - { url = "https://files.pythonhosted.org/packages/01/6f/9711b57dc6894a55faf80a4c1b5aa4f8649805cb9c7aef46f7d27e2b9206/regex-2025.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:9f95fbaa0ee1610ec0fc6b26668e9917a582ba80c52cc6d9ada15e30aa9ab9ad", size = 280320, upload-time = "2025-11-03T21:33:12.572Z" }, - { url = "https://files.pythonhosted.org/packages/f1/7e/f6eaa207d4377481f5e1775cdeb5a443b5a59b392d0065f3417d31d80f87/regex-2025.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:dfec44d532be4c07088c3de2876130ff0fbeeacaa89a137decbbb5f665855a0f", size = 273372, upload-time = "2025-11-03T21:33:14.219Z" }, - { url = "https://files.pythonhosted.org/packages/c3/06/49b198550ee0f5e4184271cee87ba4dfd9692c91ec55289e6282f0f86ccf/regex-2025.11.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ba0d8a5d7f04f73ee7d01d974d47c5834f8a1b0224390e4fe7c12a3a92a78ecc", size = 491985, upload-time = "2025-11-03T21:33:16.555Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/bf/abdafade008f0b1c9da10d934034cb670432d6cf6cbe38bbb53a1cfd6cf8/regex-2025.11.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:442d86cf1cfe4faabf97db7d901ef58347efd004934da045c745e7b5bd57ac49", size = 292669, upload-time = "2025-11-03T21:33:18.32Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ef/0c357bb8edbd2ad8e273fcb9e1761bc37b8acbc6e1be050bebd6475f19c1/regex-2025.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fd0a5e563c756de210bb964789b5abe4f114dacae9104a47e1a649b910361536", size = 291030, upload-time = "2025-11-03T21:33:20.048Z" }, - { url = "https://files.pythonhosted.org/packages/79/06/edbb67257596649b8fb088d6aeacbcb248ac195714b18a65e018bf4c0b50/regex-2025.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf3490bcbb985a1ae97b2ce9ad1c0f06a852d5b19dde9b07bdf25bf224248c95", size = 807674, upload-time = "2025-11-03T21:33:21.797Z" }, - { url = "https://files.pythonhosted.org/packages/f4/d9/ad4deccfce0ea336296bd087f1a191543bb99ee1c53093dcd4c64d951d00/regex-2025.11.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3809988f0a8b8c9dcc0f92478d6501fac7200b9ec56aecf0ec21f4a2ec4b6009", size = 873451, upload-time = "2025-11-03T21:33:23.741Z" }, - { url = "https://files.pythonhosted.org/packages/13/75/a55a4724c56ef13e3e04acaab29df26582f6978c000ac9cd6810ad1f341f/regex-2025.11.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f4ff94e58e84aedb9c9fce66d4ef9f27a190285b451420f297c9a09f2b9abee9", size = 914980, upload-time = "2025-11-03T21:33:25.999Z" }, - { url = "https://files.pythonhosted.org/packages/67/1e/a1657ee15bd9116f70d4a530c736983eed997b361e20ecd8f5ca3759d5c5/regex-2025.11.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eb542fd347ce61e1321b0a6b945d5701528dca0cd9759c2e3bb8bd57e47964d", size 
= 812852, upload-time = "2025-11-03T21:33:27.852Z" }, - { url = "https://files.pythonhosted.org/packages/b8/6f/f7516dde5506a588a561d296b2d0044839de06035bb486b326065b4c101e/regex-2025.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2d5919075a1f2e413c00b056ea0c2f065b3f5fe83c3d07d325ab92dce51d6", size = 795566, upload-time = "2025-11-03T21:33:32.364Z" }, - { url = "https://files.pythonhosted.org/packages/d9/dd/3d10b9e170cc16fb34cb2cef91513cf3df65f440b3366030631b2984a264/regex-2025.11.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:3f8bf11a4827cc7ce5a53d4ef6cddd5ad25595d3c1435ef08f76825851343154", size = 868463, upload-time = "2025-11-03T21:33:34.459Z" }, - { url = "https://files.pythonhosted.org/packages/f5/8e/935e6beff1695aa9085ff83195daccd72acc82c81793df480f34569330de/regex-2025.11.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:22c12d837298651e5550ac1d964e4ff57c3f56965fc1812c90c9fb2028eaf267", size = 854694, upload-time = "2025-11-03T21:33:36.793Z" }, - { url = "https://files.pythonhosted.org/packages/92/12/10650181a040978b2f5720a6a74d44f841371a3d984c2083fc1752e4acf6/regex-2025.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:62ba394a3dda9ad41c7c780f60f6e4a70988741415ae96f6d1bf6c239cf01379", size = 799691, upload-time = "2025-11-03T21:33:39.079Z" }, - { url = "https://files.pythonhosted.org/packages/67/90/8f37138181c9a7690e7e4cb388debbd389342db3c7381d636d2875940752/regex-2025.11.3-cp314-cp314t-win32.whl", hash = "sha256:4bf146dca15cdd53224a1bf46d628bd7590e4a07fbb69e720d561aea43a32b38", size = 274583, upload-time = "2025-11-03T21:33:41.302Z" }, - { url = "https://files.pythonhosted.org/packages/8f/cd/867f5ec442d56beb56f5f854f40abcfc75e11d10b11fdb1869dd39c63aaf/regex-2025.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:adad1a1bcf1c9e76346e091d22d23ac54ef28e1365117d99521631078dfec9de", size = 284286, upload-time = "2025-11-03T21:33:43.324Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/31/32c0c4610cbc070362bf1d2e4ea86d1ea29014d400a6d6c2486fcfd57766/regex-2025.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:c54f768482cef41e219720013cd05933b6f971d9562544d691c68699bf2b6801", size = 274741, upload-time = "2025-11-03T21:33:45.557Z" }, +version = "2026.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/3a246dbf05666918bd3664d9d787f84a9108f6f43cc953a077e4a7dfdb7e/regex-2026.4.4.tar.gz", hash = "sha256:e08270659717f6973523ce3afbafa53515c4dc5dcad637dc215b6fd50f689423", size = 416000, upload-time = "2026-04-03T20:56:28.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/28/b972a4d3df61e1d7bcf1b59fdb3cddef22f88b6be43f161bb41ebc0e4081/regex-2026.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c07ab8794fa929e58d97a0e1796b8b76f70943fa39df225ac9964615cf1f9d52", size = 490434, upload-time = "2026-04-03T20:53:40.219Z" }, + { url = "https://files.pythonhosted.org/packages/84/20/30041446cf6dc3e0eab344fc62770e84c23b6b68a3b657821f9f80cb69b4/regex-2026.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c785939dc023a1ce4ec09599c032cc9933d258a998d16ca6f2b596c010940eb", size = 292061, upload-time = "2026-04-03T20:53:41.862Z" }, + { url = "https://files.pythonhosted.org/packages/62/c8/3baa06d75c98c46d4cc4262b71fd2edb9062b5665e868bca57859dadf93a/regex-2026.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b1ce5c81c9114f1ce2f9288a51a8fd3aeea33a0cc440c415bf02da323aa0a76", size = 289628, upload-time = "2026-04-03T20:53:43.701Z" }, + { url = "https://files.pythonhosted.org/packages/31/87/3accf55634caad8c0acab23f5135ef7d4a21c39f28c55c816ae012931408/regex-2026.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:760ef21c17d8e6a4fe8cf406a97cf2806a4df93416ccc82fc98d25b1c20425be", size = 796651, upload-time = "2026-04-03T20:53:45.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/0c/aaa2c83f34efedbf06f61cb1942c25f6cf1ee3b200f832c4d05f28306c2e/regex-2026.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7088fcdcb604a4417c208e2169715800d28838fefd7455fbe40416231d1d47c1", size = 865916, upload-time = "2026-04-03T20:53:47.064Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f6/8c6924c865124643e8f37823eca845dc27ac509b2ee58123685e71cd0279/regex-2026.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:07edca1ba687998968f7db5bc355288d0c6505caa7374f013d27356d93976d13", size = 912287, upload-time = "2026-04-03T20:53:49.422Z" }, + { url = "https://files.pythonhosted.org/packages/11/0e/a9f6f81013e0deaf559b25711623864970fe6a098314e374ccb1540a4152/regex-2026.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:993f657a7c1c6ec51b5e0ba97c9817d06b84ea5fa8d82e43b9405de0defdc2b9", size = 801126, upload-time = "2026-04-03T20:53:51.096Z" }, + { url = "https://files.pythonhosted.org/packages/71/61/3a0cc8af2dc0c8deb48e644dd2521f173f7e6513c6e195aad9aa8dd77ac5/regex-2026.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2b69102a743e7569ebee67e634a69c4cb7e59d6fa2e1aa7d3bdbf3f61435f62d", size = 776788, upload-time = "2026-04-03T20:53:52.889Z" }, + { url = "https://files.pythonhosted.org/packages/64/0b/8bb9cbf21ef7dee58e49b0fdb066a7aded146c823202e16494a36777594f/regex-2026.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dac006c8b6dda72d86ea3d1333d45147de79a3a3f26f10c1cf9287ca4ca0ac3", size = 785184, upload-time = "2026-04-03T20:53:55.627Z" }, + { url = "https://files.pythonhosted.org/packages/99/c2/d3e80e8137b25ee06c92627de4e4d98b94830e02b3e6f81f3d2e3f504cf5/regex-2026.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:50a766ee2010d504554bfb5f578ed2e066898aa26411d57e6296230627cdefa0", size = 859913, upload-time = 
"2026-04-03T20:53:57.249Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/9d5d876157d969c804622456ef250017ac7a8f83e0e14f903b9e6df5ce95/regex-2026.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9e2f5217648f68e3028c823df58663587c1507a5ba8419f4fdfc8a461be76043", size = 765732, upload-time = "2026-04-03T20:53:59.428Z" }, + { url = "https://files.pythonhosted.org/packages/82/80/b568935b4421388561c8ed42aff77247285d3ae3bb2a6ca22af63bae805e/regex-2026.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:39d8de85a08e32632974151ba59c6e9140646dcc36c80423962b1c5c0a92e244", size = 852152, upload-time = "2026-04-03T20:54:01.505Z" }, + { url = "https://files.pythonhosted.org/packages/39/29/f0f81217e21cd998245da047405366385d5c6072048038a3d33b37a79dc0/regex-2026.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55d9304e0e7178dfb1e106c33edf834097ddf4a890e2f676f6c5118f84390f73", size = 789076, upload-time = "2026-04-03T20:54:03.323Z" }, + { url = "https://files.pythonhosted.org/packages/49/1d/1d957a61976ab9d4e767dd4f9d04b66cc0c41c5e36cf40e2d43688b5ae6f/regex-2026.4.4-cp312-cp312-win32.whl", hash = "sha256:04bb679bc0bde8a7bfb71e991493d47314e7b98380b083df2447cda4b6edb60f", size = 266700, upload-time = "2026-04-03T20:54:05.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/5c/bf575d396aeb58ea13b06ef2adf624f65b70fafef6950a80fc3da9cae3bc/regex-2026.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:db0ac18435a40a2543dbb3d21e161a6c78e33e8159bd2e009343d224bb03bb1b", size = 277768, upload-time = "2026-04-03T20:54:07.312Z" }, + { url = "https://files.pythonhosted.org/packages/c9/27/049df16ec6a6828ccd72add3c7f54b4df029669bea8e9817df6fff58be90/regex-2026.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:4ce255cc05c1947a12989c6db801c96461947adb7a59990f1360b5983fab4983", size = 270568, upload-time = "2026-04-03T20:54:09.484Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/83/c4373bc5f31f2cf4b66f9b7c31005bd87fe66f0dce17701f7db4ee79ee29/regex-2026.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:62f5519042c101762509b1d717b45a69c0139d60414b3c604b81328c01bd1943", size = 490273, upload-time = "2026-04-03T20:54:11.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/f8/fe62afbcc3cf4ad4ac9adeaafd98aa747869ae12d3e8e2ac293d0593c435/regex-2026.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3790ba9fb5dd76715a7afe34dbe603ba03f8820764b1dc929dd08106214ed031", size = 291954, upload-time = "2026-04-03T20:54:13.412Z" }, + { url = "https://files.pythonhosted.org/packages/5a/92/4712b9fe6a33d232eeb1c189484b80c6c4b8422b90e766e1195d6e758207/regex-2026.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8fae3c6e795d7678963f2170152b0d892cf6aee9ee8afc8c45e6be38d5107fe7", size = 289487, upload-time = "2026-04-03T20:54:15.824Z" }, + { url = "https://files.pythonhosted.org/packages/88/2c/f83b93f85e01168f1070f045a42d4c937b69fdb8dd7ae82d307253f7e36e/regex-2026.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:298c3ec2d53225b3bf91142eb9691025bab610e0c0c51592dde149db679b3d17", size = 796646, upload-time = "2026-04-03T20:54:18.229Z" }, + { url = "https://files.pythonhosted.org/packages/df/55/61a2e17bf0c4dc57e11caf8dd11771280d8aaa361785f9e3bc40d653f4a7/regex-2026.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e9638791082eaf5b3ac112c587518ee78e083a11c4b28012d8fe2a0f536dfb17", size = 865904, upload-time = "2026-04-03T20:54:20.019Z" }, + { url = "https://files.pythonhosted.org/packages/45/32/1ac8ed1b5a346b5993a3d256abe0a0f03b0b73c8cc88d928537368ac65b6/regex-2026.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae3e764bd4c5ff55035dc82a8d49acceb42a5298edf6eb2fc4d328ee5dd7afae", size = 912304, upload-time = "2026-04-03T20:54:22.403Z" 
}, + { url = "https://files.pythonhosted.org/packages/26/47/2ee5c613ab546f0eddebf9905d23e07beb933416b1246c2d8791d01979b4/regex-2026.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ffa81f81b80047ba89a3c69ae6a0f78d06f4a42ce5126b0eb2a0a10ad44e0b2e", size = 801126, upload-time = "2026-04-03T20:54:24.308Z" }, + { url = "https://files.pythonhosted.org/packages/75/cd/41dacd129ca9fd20bd7d02f83e0fad83e034ac8a084ec369c90f55ef37e2/regex-2026.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f56ebf9d70305307a707911b88469213630aba821e77de7d603f9d2f0730687d", size = 776772, upload-time = "2026-04-03T20:54:26.319Z" }, + { url = "https://files.pythonhosted.org/packages/89/6d/5af0b588174cb5f46041fa7dd64d3fd5cd2fe51f18766703d1edc387f324/regex-2026.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:773d1dfd652bbffb09336abf890bfd64785c7463716bf766d0eb3bc19c8b7f27", size = 785228, upload-time = "2026-04-03T20:54:28.387Z" }, + { url = "https://files.pythonhosted.org/packages/b7/3b/f5a72b7045bd59575fc33bf1345f156fcfd5a8484aea6ad84b12c5a82114/regex-2026.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d51d20befd5275d092cdffba57ded05f3c436317ee56466c8928ac32d960edaf", size = 860032, upload-time = "2026-04-03T20:54:30.641Z" }, + { url = "https://files.pythonhosted.org/packages/39/a4/72a317003d6fcd7a573584a85f59f525dfe8f67e355ca74eb6b53d66a5e2/regex-2026.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:0a51cdb3c1e9161154f976cb2bef9894bc063ac82f31b733087ffb8e880137d0", size = 765714, upload-time = "2026-04-03T20:54:32.789Z" }, + { url = "https://files.pythonhosted.org/packages/25/1e/5672e16f34dbbcb2560cc7e6a2fbb26dfa8b270711e730101da4423d3973/regex-2026.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ae5266a82596114e41fb5302140e9630204c1b5f325c770bec654b95dd54b0aa", size = 852078, upload-time = "2026-04-03T20:54:34.546Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/0d/c813f0af7c6cc7ed7b9558bac2e5120b60ad0fa48f813e4d4bd55446f214/regex-2026.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c882cd92ec68585e9c1cf36c447ec846c0d94edd706fe59e0c198e65822fd23b", size = 789181, upload-time = "2026-04-03T20:54:36.642Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a344608d1adbd2a95090ddd906cec09a11be0e6517e878d02a5123e0917f/regex-2026.4.4-cp313-cp313-win32.whl", hash = "sha256:05568c4fbf3cb4fa9e28e3af198c40d3237cf6041608a9022285fe567ec3ad62", size = 266690, upload-time = "2026-04-03T20:54:38.343Z" }, + { url = "https://files.pythonhosted.org/packages/31/07/54049f89b46235ca6f45cd6c88668a7050e77d4a15555e47dd40fde75263/regex-2026.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:3384df51ed52db0bea967e21458ab0a414f67cdddfd94401688274e55147bb81", size = 277733, upload-time = "2026-04-03T20:54:40.11Z" }, + { url = "https://files.pythonhosted.org/packages/0e/21/61366a8e20f4d43fb597708cac7f0e2baadb491ecc9549b4980b2be27d16/regex-2026.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:acd38177bd2c8e69a411d6521760806042e244d0ef94e2dd03ecdaa8a3c99427", size = 270565, upload-time = "2026-04-03T20:54:41.883Z" }, + { url = "https://files.pythonhosted.org/packages/f1/1e/3a2b9672433bef02f5d39aa1143ca2c08f311c1d041c464a42be9ae648dc/regex-2026.4.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f94a11a9d05afcfcfa640e096319720a19cc0c9f7768e1a61fceee6a3afc6c7c", size = 494126, upload-time = "2026-04-03T20:54:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/4e/4b/c132a4f4fe18ad3340d89fcb56235132b69559136036b845be3c073142ed/regex-2026.4.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:36bcb9d6d1307ab629edc553775baada2aefa5c50ccc0215fbfd2afcfff43141", size = 293882, upload-time = "2026-04-03T20:54:45.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/5f/eaa38092ce7a023656280f2341dbbd4ad5f05d780a70abba7bb4f4bea54c/regex-2026.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:261c015b3e2ed0919157046d768774ecde57f03d8fa4ba78d29793447f70e717", size = 292334, upload-time = "2026-04-03T20:54:47.051Z" }, + { url = "https://files.pythonhosted.org/packages/5f/f6/dd38146af1392dac33db7074ab331cec23cced3759167735c42c5460a243/regex-2026.4.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c228cf65b4a54583763645dcd73819b3b381ca8b4bb1b349dee1c135f4112c07", size = 811691, upload-time = "2026-04-03T20:54:49.074Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f0/dc54c2e69f5eeec50601054998ec3690d5344277e782bd717e49867c1d29/regex-2026.4.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dd2630faeb6876fb0c287f664d93ddce4d50cd46c6e88e60378c05c9047e08ca", size = 871227, upload-time = "2026-04-03T20:54:51.035Z" }, + { url = "https://files.pythonhosted.org/packages/a1/af/cb16bd5dc61621e27df919a4449bbb7e5a1034c34d307e0a706e9cc0f3e3/regex-2026.4.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6a50ab11b7779b849472337191f3a043e27e17f71555f98d0092fa6d73364520", size = 917435, upload-time = "2026-04-03T20:54:52.994Z" }, + { url = "https://files.pythonhosted.org/packages/5c/71/8b260897f22996b666edd9402861668f45a2ca259f665ac029e6104a2d7d/regex-2026.4.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0734f63afe785138549fbe822a8cfeaccd1bae814c5057cc0ed5b9f2de4fc883", size = 816358, upload-time = "2026-04-03T20:54:54.884Z" }, + { url = "https://files.pythonhosted.org/packages/1c/60/775f7f72a510ef238254906c2f3d737fc80b16ca85f07d20e318d2eea894/regex-2026.4.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c4ee50606cb1967db7e523224e05f32089101945f859928e65657a2cbb3d278b", size = 785549, upload-time = "2026-04-03T20:54:57.01Z" }, + { url = "https://files.pythonhosted.org/packages/58/42/34d289b3627c03cf381e44da534a0021664188fa49ba41513da0b4ec6776/regex-2026.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6c1818f37be3ca02dcb76d63f2c7aaba4b0dc171b579796c6fbe00148dfec6b1", size = 801364, upload-time = "2026-04-03T20:54:58.981Z" }, + { url = "https://files.pythonhosted.org/packages/fc/20/f6ecf319b382a8f1ab529e898b222c3f30600fcede7834733c26279e7465/regex-2026.4.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f5bfc2741d150d0be3e4a0401a5c22b06e60acb9aa4daa46d9e79a6dcd0f135b", size = 866221, upload-time = "2026-04-03T20:55:00.88Z" }, + { url = "https://files.pythonhosted.org/packages/92/6a/9f16d3609d549bd96d7a0b2aee1625d7512ba6a03efc01652149ef88e74d/regex-2026.4.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:504ffa8a03609a087cad81277a629b6ce884b51a24bd388a7980ad61748618ff", size = 772530, upload-time = "2026-04-03T20:55:03.213Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f6/aa9768bc96a4c361ac96419fbaf2dcdc33970bb813df3ba9b09d5d7b6d96/regex-2026.4.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70aadc6ff12e4b444586e57fc30771f86253f9f0045b29016b9605b4be5f7dfb", size = 856989, upload-time = "2026-04-03T20:55:05.087Z" }, + { url = "https://files.pythonhosted.org/packages/4d/b4/c671db3556be2473ae3e4bb7a297c518d281452871501221251ea4ecba57/regex-2026.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f4f83781191007b6ef43b03debc35435f10cad9b96e16d147efe84a1d48bdde4", size = 803241, upload-time = "2026-04-03T20:55:07.162Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5c/83e3b1d89fa4f6e5a1bc97b4abd4a9a97b3c1ac7854164f694f5f0ba98a0/regex-2026.4.4-cp313-cp313t-win32.whl", hash = "sha256:e014a797de43d1847df957c0a2a8e861d1c17547ee08467d1db2c370b7568baa", size = 269921, upload-time = "2026-04-03T20:55:09.62Z" 
}, + { url = "https://files.pythonhosted.org/packages/28/07/077c387121f42cdb4d92b1301133c0d93b5709d096d1669ab847dda9fe2e/regex-2026.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:b15b88b0d52b179712632832c1d6e58e5774f93717849a41096880442da41ab0", size = 281240, upload-time = "2026-04-03T20:55:11.521Z" }, + { url = "https://files.pythonhosted.org/packages/9d/22/ead4a4abc7c59a4d882662aa292ca02c8b617f30b6e163bc1728879e9353/regex-2026.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:586b89cdadf7d67bf86ae3342a4dcd2b8d70a832d90c18a0ae955105caf34dbe", size = 272440, upload-time = "2026-04-03T20:55:13.365Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f5/ed97c2dc47b5fbd4b73c0d7d75f9ebc8eca139f2bbef476bba35f28c0a77/regex-2026.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2da82d643fa698e5e5210e54af90181603d5853cf469f5eedf9bfc8f59b4b8c7", size = 490343, upload-time = "2026-04-03T20:55:15.241Z" }, + { url = "https://files.pythonhosted.org/packages/80/e9/de4828a7385ec166d673a5790ad06ac48cdaa98bc0960108dd4b9cc1aef7/regex-2026.4.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:54a1189ad9d9357760557c91103d5e421f0a2dabe68a5cdf9103d0dcf4e00752", size = 291909, upload-time = "2026-04-03T20:55:17.558Z" }, + { url = "https://files.pythonhosted.org/packages/b4/d6/5cfbfc97f3201a4d24b596a77957e092030dcc4205894bc035cedcfce62f/regex-2026.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:76d67d5afb1fe402d10a6403bae668d000441e2ab115191a804287d53b772951", size = 289692, upload-time = "2026-04-03T20:55:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ac/f2212d9fd56fe897e36d0110ba30ba2d247bd6410c5bd98499c7e5a1e1f2/regex-2026.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e7cd3e4ee8d80447a83bbc9ab0c8459781fa77087f856c3e740d7763be0df27f", size = 796979, upload-time = "2026-04-03T20:55:22.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/e3/a016c12675fbac988a60c7e1c16e67823ff0bc016beb27bd7a001dbdabc6/regex-2026.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e19e18c568d2866d8b6a6dfad823db86193503f90823a8f66689315ba28fbe8", size = 866744, upload-time = "2026-04-03T20:55:24.646Z" }, + { url = "https://files.pythonhosted.org/packages/af/a4/0b90ca4cf17adc3cb43de80ec71018c37c88ad64987e8d0d481a95ca60b5/regex-2026.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7698a6f38730fd1385d390d1ed07bb13dce39aa616aca6a6d89bea178464b9a4", size = 911613, upload-time = "2026-04-03T20:55:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/8e/3b/2b3dac0b82d41ab43aa87c6ecde63d71189d03fe8854b8ca455a315edac3/regex-2026.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:173a66f3651cdb761018078e2d9487f4cf971232c990035ec0eb1cdc6bf929a9", size = 800551, upload-time = "2026-04-03T20:55:29.532Z" }, + { url = "https://files.pythonhosted.org/packages/25/fe/5365eb7aa0e753c4b5957815c321519ecab033c279c60e1b1ae2367fa810/regex-2026.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa7922bbb2cc84fa062d37723f199d4c0cd200245ce269c05db82d904db66b83", size = 776911, upload-time = "2026-04-03T20:55:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b3/7fb0072156bba065e3b778a7bc7b0a6328212be5dd6a86fd207e0c4f2dab/regex-2026.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:59f67cd0a0acaf0e564c20bbd7f767286f23e91e2572c5703bf3e56ea7557edb", size = 785751, upload-time = "2026-04-03T20:55:33.797Z" }, + { url = "https://files.pythonhosted.org/packages/02/1a/9f83677eb699273e56e858f7bd95acdbee376d42f59e8bfca2fd80d79df3/regex-2026.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:475e50f3f73f73614f7cba5524d6de49dee269df00272a1b85e3d19f6d498465", size = 860484, upload-time = 
"2026-04-03T20:55:35.745Z" }, + { url = "https://files.pythonhosted.org/packages/3b/7a/93937507b61cfcff8b4c5857f1b452852b09f741daa9acae15c971d8554e/regex-2026.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:a1c0c7d67b64d85ac2e1879923bad2f08a08f3004055f2f406ef73c850114bd4", size = 765939, upload-time = "2026-04-03T20:55:37.972Z" }, + { url = "https://files.pythonhosted.org/packages/86/ea/81a7f968a351c6552b1670ead861e2a385be730ee28402233020c67f9e0f/regex-2026.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:1371c2ccbb744d66ee63631cc9ca12aa233d5749972626b68fe1a649dd98e566", size = 851417, upload-time = "2026-04-03T20:55:39.92Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7e/323c18ce4b5b8f44517a36342961a0306e931e499febbd876bb149d900f0/regex-2026.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:59968142787042db793348a3f5b918cf24ced1f23247328530e063f89c128a95", size = 789056, upload-time = "2026-04-03T20:55:42.303Z" }, + { url = "https://files.pythonhosted.org/packages/c0/af/e7510f9b11b1913b0cd44eddb784b2d650b2af6515bfce4cffcc5bfd1d38/regex-2026.4.4-cp314-cp314-win32.whl", hash = "sha256:59efe72d37fd5a91e373e5146f187f921f365f4abc1249a5ab446a60f30dd5f8", size = 272130, upload-time = "2026-04-03T20:55:44.995Z" }, + { url = "https://files.pythonhosted.org/packages/9a/51/57dae534c915e2d3a21490e88836fa2ae79dde3b66255ecc0c0a155d2c10/regex-2026.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:e0aab3ff447845049d676827d2ff714aab4f73f340e155b7de7458cf53baa5a4", size = 280992, upload-time = "2026-04-03T20:55:47.316Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5e/abaf9f4c3792e34edb1434f06717fae2b07888d85cb5cec29f9204931bf8/regex-2026.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:a7a5bb6aa0cf62208bb4fa079b0c756734f8ad0e333b425732e8609bd51ee22f", size = 273563, upload-time = "2026-04-03T20:55:49.273Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/06/35da85f9f217b9538b99cbb170738993bcc3b23784322decb77619f11502/regex-2026.4.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:97850d0638391bdc7d35dc1c1039974dcb921eaafa8cc935ae4d7f272b1d60b3", size = 494191, upload-time = "2026-04-03T20:55:51.258Z" }, + { url = "https://files.pythonhosted.org/packages/54/5b/1bc35f479eef8285c4baf88d8c002023efdeebb7b44a8735b36195486ae7/regex-2026.4.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ee7337f88f2a580679f7bbfe69dc86c043954f9f9c541012f49abc554a962f2e", size = 293877, upload-time = "2026-04-03T20:55:53.214Z" }, + { url = "https://files.pythonhosted.org/packages/39/5b/f53b9ad17480b3ddd14c90da04bfb55ac6894b129e5dea87bcaf7d00e336/regex-2026.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7429f4e6192c11d659900c0648ba8776243bf396ab95558b8c51a345afeddde6", size = 292410, upload-time = "2026-04-03T20:55:55.736Z" }, + { url = "https://files.pythonhosted.org/packages/bb/56/52377f59f60a7c51aa4161eecf0b6032c20b461805aca051250da435ffc9/regex-2026.4.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4f10fbd5dd13dcf4265b4cc07d69ca70280742870c97ae10093e3d66000359", size = 811831, upload-time = "2026-04-03T20:55:57.802Z" }, + { url = "https://files.pythonhosted.org/packages/dd/63/8026310bf066f702a9c361f83a8c9658f3fe4edb349f9c1e5d5273b7c40c/regex-2026.4.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a152560af4f9742b96f3827090f866eeec5becd4765c8e0d3473d9d280e76a5a", size = 871199, upload-time = "2026-04-03T20:56:00.333Z" }, + { url = "https://files.pythonhosted.org/packages/20/9f/a514bbb00a466dbb506d43f187a04047f7be1505f10a9a15615ead5080ee/regex-2026.4.4-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54170b3e95339f415d54651f97df3bff7434a663912f9358237941bbf9143f55", size = 917649, upload-time = 
"2026-04-03T20:56:02.445Z" }, + { url = "https://files.pythonhosted.org/packages/cb/6b/8399f68dd41a2030218839b9b18360d79b86d22b9fab5ef477c7f23ca67c/regex-2026.4.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:07f190d65f5a72dcb9cf7106bfc3d21e7a49dd2879eda2207b683f32165e4d99", size = 816388, upload-time = "2026-04-03T20:56:04.595Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/103963f47c24339a483b05edd568594c2be486188f688c0170fd504b2948/regex-2026.4.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9a2741ce5a29d3c84b0b94261ba630ab459a1b847a0d6beca7d62d188175c790", size = 785746, upload-time = "2026-04-03T20:56:07.13Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ee/7f6054c0dec0cee3463c304405e4ff42e27cff05bf36fcb34be549ab17bd/regex-2026.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b26c30df3a28fd9793113dac7385a4deb7294a06c0f760dd2b008bd49a9139bc", size = 801483, upload-time = "2026-04-03T20:56:09.365Z" }, + { url = "https://files.pythonhosted.org/packages/30/c2/51d3d941cf6070dc00c3338ecf138615fc3cce0421c3df6abe97a08af61a/regex-2026.4.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:421439d1bee44b19f4583ccf42670ca464ffb90e9fdc38d37f39d1ddd1e44f1f", size = 866331, upload-time = "2026-04-03T20:56:12.039Z" }, + { url = "https://files.pythonhosted.org/packages/16/e8/76d50dcc122ac33927d939f350eebcfe3dbcbda96913e03433fc36de5e63/regex-2026.4.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:b40379b53ecbc747fd9bdf4a0ea14eb8188ca1bd0f54f78893a39024b28f4863", size = 772673, upload-time = "2026-04-03T20:56:14.558Z" }, + { url = "https://files.pythonhosted.org/packages/a5/6e/5f6bf75e20ea6873d05ba4ec78378c375cbe08cdec571c83fbb01606e563/regex-2026.4.4-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:08c55c13d2eef54f73eeadc33146fb0baaa49e7335eb1aff6ae1324bf0ddbe4a", size = 857146, upload-time = "2026-04-03T20:56:16.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/33/3c76d9962949e487ebba353a18e89399f292287204ac8f2f4cfc3a51c233/regex-2026.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9776b85f510062f5a75ef112afe5f494ef1635607bf1cc220c1391e9ac2f5e81", size = 803463, upload-time = "2026-04-03T20:56:18.923Z" }, + { url = "https://files.pythonhosted.org/packages/19/eb/ef32dcd2cb69b69bc0c3e55205bce94a7def48d495358946bc42186dcccc/regex-2026.4.4-cp314-cp314t-win32.whl", hash = "sha256:385edaebde5db5be103577afc8699fea73a0e36a734ba24870be7ffa61119d74", size = 275709, upload-time = "2026-04-03T20:56:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/a0/86/c291bf740945acbf35ed7dbebf8e2eea2f3f78041f6bd7cdab80cb274dc0/regex-2026.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:5d354b18839328927832e2fa5f7c95b7a3ccc39e7a681529e1685898e6436d45", size = 285622, upload-time = "2026-04-03T20:56:23.641Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e7/ec846d560ae6a597115153c02ca6138a7877a1748b2072d9521c10a93e58/regex-2026.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:af0384cb01a33600c49505c27c6c57ab0b27bf84a74e28524c92ca897ebdac9d", size = 275773, upload-time = "2026-04-03T20:56:26.07Z" }, ] [[package]] name = "requests" -version = "2.32.5" +version = "2.33.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -1564,9 +2161,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time 
= "2026-03-30T16:09:15.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, ] [[package]] @@ -1584,15 +2181,15 @@ wheels = [ [[package]] name = "rich" -version = "14.2.0" +version = "15.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/8f/0722ca900cc807c13a6a0c696dacf35430f72e0ec571c4275d2371fca3e9/rich-15.0.0.tar.gz", hash = "sha256:edd07a4824c6b40189fb7ac9bc4c52536e9780fbbfbddf6f1e2502c31b068c36", size = 230680, upload-time = "2026-04-12T08:24:00.75Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, + { url = "https://files.pythonhosted.org/packages/82/3b/64d4899d73f91ba49a8c18a8ff3f0ea8f1c1d75481760df8c68ef5235bf5/rich-15.0.0-py3-none-any.whl", hash = "sha256:33bd4ef74232fb73fe9279a257718407f169c09b78a87ad3d296f548e27de0bb", size = 310654, upload-time = 
"2026-04-12T08:24:02.83Z" }, ] [[package]] @@ -1676,18 +2273,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, ] -[[package]] -name = "rsa" -version = "4.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyasn1" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, -] - [[package]] name = "shellingham" version = "1.5.4" @@ -1708,28 +2293,37 @@ wheels = [ [[package]] name = "sse-starlette" -version = "3.1.2" +version = "3.3.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/34/f5df66cb383efdbf4f2db23cabb27f51b1dcb737efaf8a558f6f1d195134/sse_starlette-3.1.2.tar.gz", hash = "sha256:55eff034207a83a0eb86de9a68099bd0157838f0b8b999a1b742005c71e33618", size = 26303, upload-time = "2025-12-31T08:02:20.023Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/8c/f9290339ef6d79badbc010f067cd769d6601ec11a57d78569c683fb4dd87/sse_starlette-3.3.4.tar.gz", hash = "sha256:aaf92fc067af8a5427192895ac028e947b484ac01edbc3caf00e7e7137c7bef1", size = 32427, upload-time = "2026-03-29T09:00:23.307Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b7/95/8c4b76eec9ae574474e5d2997557cebf764bcd3586458956c30631ae08f4/sse_starlette-3.1.2-py3-none-any.whl", hash = "sha256:cd800dd349f4521b317b9391d3796fa97b71748a4da9b9e00aafab32dda375c8", size = 12484, upload-time = "2025-12-31T08:02:18.894Z" }, + { url = "https://files.pythonhosted.org/packages/f8/7f/3de5402f39890ac5660b86bcf5c03f9d855dad5c4ed764866d7b592b46fd/sse_starlette-3.3.4-py3-none-any.whl", hash = "sha256:84bb06e58939a8b38d8341f1bc9792f06c2b53f48c608dd207582b664fc8f3c1", size = 14330, upload-time = "2026-03-29T09:00:21.846Z" }, ] [[package]] name = "starlette" -version = "0.50.0" +version = "1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" }, +] + +[[package]] +name = "std-uritemplate" +version = "2.0.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/93/62/61866776cd32df3f984ff2f79b1428e10700e0a33ca7a7536e3fcba3cf2a/std_uritemplate-2.0.8.tar.gz", hash = 
"sha256:138ceff2c5bfef18a650372a5e8c82fe7f780c87235513de6c342fb5f7e18347", size = 6018, upload-time = "2025-10-16T15:51:29.774Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, + { url = "https://files.pythonhosted.org/packages/64/97/b4f2f442fee92a1406f08b4fbc990bd7d02dc84b3b5e6315a59fa9b2a9f4/std_uritemplate-2.0.8-py3-none-any.whl", hash = "sha256:839807a7f9d07f0bad1a88977c3428bd97b9ff0d229412a0bf36123d8c724257", size = 6512, upload-time = "2025-10-16T15:51:28.713Z" }, ] [[package]] @@ -1781,29 +2375,30 @@ wheels = [ [[package]] name = "tqdm" -version = "4.67.1" +version = "4.67.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/a9/6ba95a270c6f1fbcd8dac228323f2777d886cb206987444e4bce66338dd4/tqdm-4.67.3.tar.gz", hash = "sha256:7d825f03f89244ef73f1d4ce193cb1774a8179fd96f31d7e1dcde62092b960bb", size = 169598, upload-time = "2026-02-03T17:35:53.048Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, + { url = "https://files.pythonhosted.org/packages/16/e1/3079a9ff9b8e11b846c6ac5c8b5bfb7ff225eee721825310c91b3b50304f/tqdm-4.67.3-py3-none-any.whl", 
hash = "sha256:ee1e4c0e59148062281c49d80b25b67771a127c85fc9676d3be5f243206826bf", size = 78374, upload-time = "2026-02-03T17:35:50.982Z" }, ] [[package]] name = "typeagent" -version = "0.3.3" +version = "0.4.0.dev0" source = { editable = "." } dependencies = [ { name = "azure-identity" }, - { name = "black" }, { name = "colorama" }, { name = "mcp", extra = ["cli"] }, { name = "numpy" }, { name = "openai" }, { name = "pydantic" }, + { name = "pydantic-ai-slim", extra = ["openai"] }, { name = "pyreadline3", marker = "sys_platform == 'win32'" }, + { name = "pyright" }, { name = "python-dotenv" }, { name = "tiktoken" }, { name = "typechat" }, @@ -1820,14 +2415,15 @@ logfire = [ dev = [ { name = "azure-mgmt-authorization" }, { name = "azure-mgmt-keyvault" }, + { name = "black" }, { name = "coverage" }, { name = "google-api-python-client" }, { name = "google-auth-httplib2" }, { name = "google-auth-oauthlib" }, { name = "isort" }, { name = "logfire" }, + { name = "msgraph-sdk" }, { name = "opentelemetry-instrumentation-httpx" }, - { name = "pydantic-ai-slim", extra = ["openai"] }, { name = "pyright" }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -1837,7 +2433,6 @@ dev = [ [package.metadata] requires-dist = [ { name = "azure-identity", specifier = ">=1.22.0" }, - { name = "black", specifier = ">=25.12.0" }, { name = "colorama", specifier = ">=0.4.6" }, { name = "logfire", marker = "extra == 'logfire'", specifier = ">=4.1.0" }, { name = "mcp", extras = ["cli"], specifier = ">=1.12.1" }, @@ -1845,7 +2440,9 @@ requires-dist = [ { name = "openai", specifier = ">=1.81.0" }, { name = "opentelemetry-instrumentation-httpx", marker = "extra == 'logfire'", specifier = ">=0.57b0" }, { name = "pydantic", specifier = ">=2.11.4" }, + { name = "pydantic-ai-slim", extras = ["openai"], specifier = ">=1.39.0" }, { name = "pyreadline3", marker = "sys_platform == 'win32'", specifier = ">=3.5.4" }, + { name = "pyright", specifier = ">=1.1.409" }, { name = "python-dotenv", specifier = 
">=1.1.0" }, { name = "tiktoken", specifier = ">=0.12.0" }, { name = "typechat", specifier = ">=0.0.4" }, @@ -1857,14 +2454,15 @@ provides-extras = ["logfire"] dev = [ { name = "azure-mgmt-authorization", specifier = ">=4.0.0" }, { name = "azure-mgmt-keyvault", specifier = ">=12.1.1" }, + { name = "black", specifier = ">=25.12.0" }, { name = "coverage", extras = ["toml"], specifier = ">=7.9.1" }, { name = "google-api-python-client", specifier = ">=2.184.0" }, { name = "google-auth-httplib2", specifier = ">=0.2.0" }, { name = "google-auth-oauthlib", specifier = ">=1.2.2" }, { name = "isort", specifier = ">=7.0.0" }, { name = "logfire", specifier = ">=4.1.0" }, + { name = "msgraph-sdk", specifier = ">=1.54.0" }, { name = "opentelemetry-instrumentation-httpx", specifier = ">=0.57b0" }, - { name = "pydantic-ai-slim", extras = ["openai"], specifier = ">=1.39.0" }, { name = "pyright", specifier = ">=1.1.408" }, { name = "pytest", specifier = ">=8.3.5" }, { name = "pytest-asyncio", specifier = ">=0.26.0" }, @@ -1888,17 +2486,17 @@ wheels = [ [[package]] name = "typer" -version = "0.21.1" +version = "0.24.1" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "annotated-doc" }, { name = "click" }, { name = "rich" }, { name = "shellingham" }, - { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/36/bf/8825b5929afd84d0dabd606c67cd57b8388cb3ec385f7ef19c5cc2202069/typer-0.21.1.tar.gz", hash = "sha256:ea835607cd752343b6b2b7ce676893e5a0324082268b48f27aa058bdb7d2145d", size = 110371, upload-time = "2026-01-06T11:21:10.989Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl", hash = "sha256:7985e89081c636b88d172c2ee0cfe33c253160994d47bdfdc302defd7d1f1d01", size = 47381, upload-time = "2026-01-06T11:21:09.824Z" }, + { url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" }, ] [[package]] @@ -1942,15 +2540,15 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.40.0" +version = "0.44.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/da/6eee1ff8b6cbeed47eeb5229749168e81eb4b7b999a1a15a7176e51410c9/uvicorn-0.44.0.tar.gz", hash = "sha256:6c942071b68f07e178264b9152f1f16dfac5da85880c4ce06366a96d70d4f31e", size = 86947, upload-time = "2026-04-06T09:23:22.826Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, + { url = "https://files.pythonhosted.org/packages/b7/23/a5bbd9600dd607411fa644c06ff4951bec3a4d82c4b852374024359c19c0/uvicorn-0.44.0-py3-none-any.whl", hash = "sha256:ce937c99a2cc70279556967274414c087888e8cec9f9c94644dfca11bd3ced89", size = 69425, upload-time = "2026-04-06T09:23:21.524Z" }, ] [[package]] @@ -2011,6 +2609,110 
@@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] +[[package]] +name = "yarl" +version = "1.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, + { 
url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + 
{ url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url 
= "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, +] + [[package]] name = "zipp" version = "3.23.0"